/**
 * videojs-contrib-hls
 * @version 5.12.0
 * @copyright 2017 Brightcove, Inc
 * @license Apache-2.0
 */
(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.videojsContribHls = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
/**
 * @file ad-cue-tags.js
 */
'use strict';

Object.defineProperty(exports, '__esModule', {
  value: true
});

var _slicedToArray = (function () { function sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i['return']) _i['return'](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError('Invalid attempt to destructure non-iterable instance'); } }; })();

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }

var _globalWindow = require('global/window');

var _globalWindow2 = _interopRequireDefault(_globalWindow);

/**
 * Searches for an ad cue that overlaps with the given mediaTime
 */
var findAdCue = function findAdCue(track, mediaTime) {
  var cues = track.cues;

  for (var i = 0; i < cues.length; i++) {
    var cue = cues[i];

    if (mediaTime >= cue.adStartTime && mediaTime <= cue.adEndTime) {
      return cue;
    }
  }
  return null;
};
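
// Illustrative usage (not part of the library): assuming `track` already
// holds a cue with adStartTime 10 and adEndTime 40, findAdCue(track, 25)
// returns that cue, while findAdCue(track, 45) returns null.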

var updateAdCues = function updateAdCues(media, track) {
  var offset = arguments.length <= 2 || arguments[2] === undefined ? 0 : arguments[2];

  if (!media.segments) {
    return;
  }

  var mediaTime = offset;
  var cue = undefined;

  for (var i = 0; i < media.segments.length; i++) {
    var segment = media.segments[i];

    if (!cue) {
      // Since the cues will span for at least the segment duration, adding a fudge
      // factor of half segment duration will prevent duplicate cues from being
      // created when timing info is not exact (e.g. cue start time initialized
      // at 10.006677, but next call mediaTime is 10.003332)
      cue = findAdCue(track, mediaTime + segment.duration / 2);
    }

    if (cue) {
      if ('cueIn' in segment) {
        // Found a CUE-IN so end the cue
        cue.endTime = mediaTime;
        cue.adEndTime = mediaTime;
        mediaTime += segment.duration;
        cue = null;
        continue;
      }

      if (mediaTime < cue.endTime) {
        // Already processed this mediaTime for this cue
        mediaTime += segment.duration;
        continue;
      }

      // otherwise extend cue until a CUE-IN is found
      cue.endTime += segment.duration;
    } else {
      if ('cueOut' in segment) {
        cue = new _globalWindow2['default'].VTTCue(mediaTime, mediaTime + segment.duration, segment.cueOut);
        cue.adStartTime = mediaTime;
        // Assumes tag format to be
        // #EXT-X-CUE-OUT:30
        cue.adEndTime = mediaTime + parseFloat(segment.cueOut);
        track.addCue(cue);
      }

      if ('cueOutCont' in segment) {
        // Entered into the middle of an ad cue
        var adOffset = undefined;
        var adTotal = undefined;

        // Assumes tag format to be
        // #EXT-X-CUE-OUT-CONT:10/30

        var _segment$cueOutCont$split$map = segment.cueOutCont.split('/').map(parseFloat);

        var _segment$cueOutCont$split$map2 = _slicedToArray(_segment$cueOutCont$split$map, 2);

        adOffset = _segment$cueOutCont$split$map2[0];
        adTotal = _segment$cueOutCont$split$map2[1];

        cue = new _globalWindow2['default'].VTTCue(mediaTime, mediaTime + segment.duration, '');
        cue.adStartTime = mediaTime - adOffset;
        cue.adEndTime = cue.adStartTime + adTotal;
        track.addCue(cue);
      }
    }
    mediaTime += segment.duration;
  }
};
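
// Illustrative example (not part of the library): a segment carrying
// #EXT-X-CUE-OUT:30 at mediaTime 100 produces a cue with adStartTime 100
// and adEndTime 130; joining mid-ad, a segment carrying
// #EXT-X-CUE-OUT-CONT:10/30 at mediaTime 110 reconstructs the same ad
// window (adStartTime 110 - 10 = 100, adEndTime 100 + 30 = 130).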

exports['default'] = {
  updateAdCues: updateAdCues,
  findAdCue: findAdCue
};
module.exports = exports['default'];
},{"global/window":32}],2:[function(require,module,exports){
/**
 * @file bin-utils.js
 */

/**
 * convert a TimeRange to text
 *
 * @param {TimeRange} range the timerange to use for conversion
 * @param {Number} i the iterator on the range to convert
 */
'use strict';

Object.defineProperty(exports, '__esModule', {
  value: true
});
var textRange = function textRange(range, i) {
  return range.start(i) + '-' + range.end(i);
};

/**
 * format a number as hex string
 *
 * @param {Number} e The number
 * @param {Number} i the iterator
 */
var formatHexString = function formatHexString(e, i) {
  var value = e.toString(16);

  return '00'.substring(0, 2 - value.length) + value + (i % 2 ? ' ' : '');
};
var formatAsciiString = function formatAsciiString(e) {
  if (e >= 0x20 && e < 0x7e) {
    return String.fromCharCode(e);
  }
  return '.';
};

/**
 * Creates an object for sending to a web worker modifying properties that are TypedArrays
 * into a new object with separated properties for the buffer, byteOffset, and byteLength.
 *
 * @param {Object} message
 *        Object of properties and values to send to the web worker
 * @return {Object}
 *         Modified message with TypedArray values expanded
 * @function createTransferableMessage
 */
var createTransferableMessage = function createTransferableMessage(message) {
  var transferable = {};

  Object.keys(message).forEach(function (key) {
    var value = message[key];

    if (ArrayBuffer.isView(value)) {
      transferable[key] = {
        bytes: value.buffer,
        byteOffset: value.byteOffset,
        byteLength: value.byteLength
      };
    } else {
      transferable[key] = value;
    }
  });

  return transferable;
};
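
// Illustrative usage (not part of the library): given
// { id: 1, payload: new Uint8Array([1, 2, 3]) }, createTransferableMessage
// returns { id: 1, payload: { bytes: <ArrayBuffer>, byteOffset: 0,
// byteLength: 3 } }, which can then be posted to a worker with the
// ArrayBuffer placed in the transfer list.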

/**
 * Returns a unique string identifier for a media initialization
 * segment.
 */
var initSegmentId = function initSegmentId(initSegment) {
  var byterange = initSegment.byterange || {
    length: Infinity,
    offset: 0
  };

  return [byterange.length, byterange.offset, initSegment.resolvedUri].join(',');
};
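
// Illustrative example (not part of the library; URI is hypothetical):
// initSegmentId({ byterange: { length: 1024, offset: 0 },
//                 resolvedUri: 'https://example.com/init.mp4' })
// returns '1024,0,https://example.com/init.mp4'; with no byterange the id
// falls back to 'Infinity,0,<resolvedUri>'.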

/**
 * utils to help dump binary data to the console
 */
var utils = {
  hexDump: function hexDump(data) {
    var bytes = Array.prototype.slice.call(data);
    var step = 16;
    var result = '';
    var hex = undefined;
    var ascii = undefined;

    for (var j = 0; j < bytes.length / step; j++) {
      hex = bytes.slice(j * step, j * step + step).map(formatHexString).join('');
      ascii = bytes.slice(j * step, j * step + step).map(formatAsciiString).join('');
      result += hex + ' ' + ascii + '\n';
    }
    return result;
  },
  tagDump: function tagDump(tag) {
    return utils.hexDump(tag.bytes);
  },
  textRanges: function textRanges(ranges) {
    var result = '';
    var i = undefined;

    for (i = 0; i < ranges.length; i++) {
      result += textRange(ranges, i) + ' ';
    }
    return result;
  },
  createTransferableMessage: createTransferableMessage,
  initSegmentId: initSegmentId
};
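
// Illustrative example (not part of the library):
// utils.hexDump(new Uint8Array([72, 105])) returns '4869  Hi\n' --
// hex pairs on the left, printable ASCII (or '.') on the right,
// 16 bytes per row.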

exports['default'] = utils;
module.exports = exports['default'];
},{}],3:[function(require,module,exports){
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports["default"] = {
  GOAL_BUFFER_LENGTH: 30,
  MAX_GOAL_BUFFER_LENGTH: 60,
  GOAL_BUFFER_LENGTH_RATE: 1,
  // A fudge factor to apply to advertised playlist bitrates to account for
  // temporary fluctuations in client bandwidth
  BANDWIDTH_VARIANCE: 1.2,
  // How much of the buffer must be filled before we consider upswitching
  BUFFER_LOW_WATER_LINE: 0,
  MAX_BUFFER_LOW_WATER_LINE: 30,
  BUFFER_LOW_WATER_LINE_RATE: 1
};
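
// Illustrative arithmetic (not part of the library): these constants feed
// goalBufferLength() and bufferLowWaterLine() further down, which grow
// linearly with playback time and are capped by the MAX_* values. For
// example, at currentTime = 45s the goal buffer is
// min(30 + 45 * 1, 60) = 60 seconds.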
module.exports = exports["default"];
},{}],4:[function(require,module,exports){
'use strict';

Object.defineProperty(exports, '__esModule', {
  value: true
});

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }

var _globalWindow = require('global/window');

var _globalWindow2 = _interopRequireDefault(_globalWindow);

var _aesDecrypter = require('aes-decrypter');

var _binUtils = require('./bin-utils');

/**
 * Our web worker interface so that things can talk to aes-decrypter
 * that will be running in a web worker. The scope is passed to this by
 * webworkify.
 *
 * @param {Object} self
 *        the scope for the web worker
 */
var DecrypterWorker = function DecrypterWorker(self) {
  self.onmessage = function (event) {
    var data = event.data;
    var encrypted = new Uint8Array(data.encrypted.bytes, data.encrypted.byteOffset, data.encrypted.byteLength);
    var key = new Uint32Array(data.key.bytes, data.key.byteOffset, data.key.byteLength / 4);
    var iv = new Uint32Array(data.iv.bytes, data.iv.byteOffset, data.iv.byteLength / 4);

    /* eslint-disable no-new, handle-callback-err */
    new _aesDecrypter.Decrypter(encrypted, key, iv, function (err, bytes) {
      _globalWindow2['default'].postMessage((0, _binUtils.createTransferableMessage)({
        source: data.source,
        decrypted: bytes
      }), [bytes.buffer]);
    });
    /* eslint-enable */
  };
};

exports['default'] = function (self) {
  return new DecrypterWorker(self);
};

module.exports = exports['default'];
},{"./bin-utils":2,"aes-decrypter":25,"global/window":32}],5:[function(require,module,exports){
(function (global){
/**
 * @file master-playlist-controller.js
 */
'use strict';

Object.defineProperty(exports, '__esModule', {
  value: true
});

var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();

var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }

function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }

function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }

var _playlistLoader = require('./playlist-loader');

var _playlistLoader2 = _interopRequireDefault(_playlistLoader);

var _segmentLoader = require('./segment-loader');

var _segmentLoader2 = _interopRequireDefault(_segmentLoader);

var _vttSegmentLoader = require('./vtt-segment-loader');

var _vttSegmentLoader2 = _interopRequireDefault(_vttSegmentLoader);

var _ranges = require('./ranges');

var _ranges2 = _interopRequireDefault(_ranges);

var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);

var _videoJs2 = _interopRequireDefault(_videoJs);

var _adCueTags = require('./ad-cue-tags');

var _adCueTags2 = _interopRequireDefault(_adCueTags);

var _syncController = require('./sync-controller');

var _syncController2 = _interopRequireDefault(_syncController);

var _videojsContribMediaSourcesEs5CodecUtils = require('videojs-contrib-media-sources/es5/codec-utils');

var _webworkify = require('webworkify');

var _webworkify2 = _interopRequireDefault(_webworkify);

var _decrypterWorker = require('./decrypter-worker');

var _decrypterWorker2 = _interopRequireDefault(_decrypterWorker);

var _config = require('./config');

var _config2 = _interopRequireDefault(_config);

var _utilCodecsJs = require('./util/codecs.js');

var _mediaGroups = require('./media-groups');

var ABORT_EARLY_BLACKLIST_SECONDS = 60 * 2;

var Hls = undefined;

// Default codec parameters if none were provided for video and/or audio
var defaultCodecs = {
  videoCodec: 'avc1',
  videoObjectTypeIndicator: '.4d400d',
  // AAC-LC
  audioProfile: '2'
};

// SegmentLoader stats that need to have each loader's
// values summed to calculate the final value
var loaderStats = ['mediaRequests', 'mediaRequestsAborted', 'mediaRequestsTimedout', 'mediaRequestsErrored', 'mediaTransferDuration', 'mediaBytesTransferred'];
var sumLoaderStat = function sumLoaderStat(stat) {
  return this.audioSegmentLoader_[stat] + this.mainSegmentLoader_[stat];
};

/**
 * Replace the old apple-style `avc1.<dd>.<dd>` codecs in the codec string
 * with the standard `avc1.<hhhhhh>` form.
 *
 * @param codecString {String} the codec string
 * @return {String} the codec string with old apple-style codecs replaced
 *
 * @private
 */
var mapLegacyAvcCodecs_ = function mapLegacyAvcCodecs_(codecString) {
  return codecString.replace(/avc1\.(\d+)\.(\d+)/i, function (match) {
    return (0, _videojsContribMediaSourcesEs5CodecUtils.translateLegacyCodecs)([match])[0];
  });
};
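
// Illustrative example (assumed behavior, based on translateLegacyCodecs
// converting the decimal profile/level pair to hex):
// mapLegacyAvcCodecs_('avc1.100.31') would yield 'avc1.64001f', while
// already-standard codec strings pass through unchanged.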

exports.mapLegacyAvcCodecs_ = mapLegacyAvcCodecs_;
/**
 * Build a media mime-type string from a set of parameters
 * @param {String} type either 'audio' or 'video'
 * @param {String} container either 'mp2t' or 'mp4'
 * @param {Array} codecs an array of codec strings to add
 * @return {String} a valid media mime-type
 */
var makeMimeTypeString = function makeMimeTypeString(type, container, codecs) {
  // The codecs array is filtered so that falsey values are
  // dropped and don't cause Array#join to create spurious
  // commas
  return type + '/' + container + '; codecs="' + codecs.filter(function (c) {
    return !!c;
  }).join(', ') + '"';
};
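
// Illustrative example (not part of the library):
// makeMimeTypeString('video', 'mp2t', ['avc1.4d400d', 'mp4a.40.2'])
// returns 'video/mp2t; codecs="avc1.4d400d, mp4a.40.2"'; a falsey entry
// such as undefined is filtered out rather than producing a stray comma.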

/**
 * Returns the type container based on information in the playlist
 * @param {Playlist} media the current media playlist
 * @return {String} a valid media container type
 */
var getContainerType = function getContainerType(media) {
  // An initialization segment means the media playlist is an iframe
  // playlist or is using the mp4 container. We don't currently
  // support iframe playlists, so assume this is signalling mp4
  // fragments.
  if (media.segments && media.segments.length && media.segments[0].map) {
    return 'mp4';
  }
  return 'mp2t';
};
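
// Illustrative example (not part of the library): a playlist whose first
// segment carries a `map` (EXT-X-MAP) property is treated as fragmented
// MP4 and yields 'mp4'; any other playlist yields the MPEG2-TS container
// 'mp2t'.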

/**
 * Returns a set of codec strings parsed from the playlist or the default
 * codec strings if no codecs were specified in the playlist
 * @param {Playlist} media the current media playlist
 * @return {Object} an object with the video and audio codecs
 */
var getCodecs = function getCodecs(media) {
  // if the codecs were explicitly specified, use them instead of the
  // defaults
  var mediaAttributes = media.attributes || {};

  if (mediaAttributes.CODECS) {
    return (0, _utilCodecsJs.parseCodecs)(mediaAttributes.CODECS);
  }
  return defaultCodecs;
};

/**
 * Calculates the MIME type strings for a working configuration of
 * SourceBuffers to play variant streams in a master playlist. If
 * there is no possible working configuration, an empty array will be
 * returned.
 *
 * @param master {Object} the m3u8 object for the master playlist
 * @param media {Object} the m3u8 object for the variant playlist
 * @return {Array} the MIME type strings. If the array has more than
 *         one entry, the first element should be applied to the video
 *         SourceBuffer and the second to the audio SourceBuffer.
 *
 * @private
 */
var mimeTypesForPlaylist_ = function mimeTypesForPlaylist_(master, media) {
  var containerType = getContainerType(media);
  var codecInfo = getCodecs(media);
  var mediaAttributes = media.attributes || {};
  // Default condition for a traditional HLS (no demuxed audio/video)
  var isMuxed = true;
  var isMaat = false;

  if (!media) {
    // Not enough information
    return [];
  }

  if (master.mediaGroups.AUDIO && mediaAttributes.AUDIO) {
    var audioGroup = master.mediaGroups.AUDIO[mediaAttributes.AUDIO];

    // Handle the case where we are in a multiple-audio track scenario
    if (audioGroup) {
      isMaat = true;
      // Start with everything demuxed, then...
      isMuxed = false;
      // ...check to see if any audio group tracks are muxed (i.e. lacking a uri)
      for (var groupId in audioGroup) {
        if (!audioGroup[groupId].uri) {
          isMuxed = true;
          break;
        }
      }
    }
  }

  // HLS with multiple-audio tracks must always get an audio codec.
  // Put another way, there is no way to have a video-only multiple-audio HLS!
  if (isMaat && !codecInfo.audioProfile) {
    _videoJs2['default'].log.warn('Multiple audio tracks present but no audio codec string is specified. ' + 'Attempting to use the default audio codec (mp4a.40.2)');
    codecInfo.audioProfile = defaultCodecs.audioProfile;
  }

  // Generate the final codec strings from the codec object generated above
  var codecStrings = {};

  if (codecInfo.videoCodec) {
    codecStrings.video = '' + codecInfo.videoCodec + codecInfo.videoObjectTypeIndicator;
  }

  if (codecInfo.audioProfile) {
    codecStrings.audio = 'mp4a.40.' + codecInfo.audioProfile;
  }

  // Finally, make and return an array with proper mime-types depending on
  // the configuration
  var justAudio = makeMimeTypeString('audio', containerType, [codecStrings.audio]);
  var justVideo = makeMimeTypeString('video', containerType, [codecStrings.video]);
  var bothVideoAudio = makeMimeTypeString('video', containerType, [codecStrings.video, codecStrings.audio]);

  if (isMaat) {
    if (!isMuxed && codecStrings.video) {
      return [justVideo, justAudio];
    }
    // There exists the possibility that this will return a `video/container`
    // mime-type for the first entry in the array even when there is only audio.
    // This doesn't appear to be a problem and simplifies the code.
    return [bothVideoAudio, justAudio];
  }

  // If there is no video codec at all, always just return a single
  // audio/<container> mime-type
  if (!codecStrings.video) {
    return [justAudio];
  }

  // When not using separate audio media groups, audio and video is
  // *always* muxed
  return [bothVideoAudio];
};
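
// Illustrative example (not part of the library): for a traditional muxed
// TS variant with CODECS="avc1.4d400d,mp4a.40.2", this returns
// ['video/mp2t; codecs="avc1.4d400d, mp4a.40.2"']; for a demuxed
// alternate-audio setup it instead returns a two-entry array, video
// mime-type first and audio mime-type second.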

exports.mimeTypesForPlaylist_ = mimeTypesForPlaylist_;
/**
 * The master playlist controller controls all interactions
 * between playlists and segment loaders. At this time this mainly
 * involves a master playlist and a series of audio playlists
 * if they are available
 *
 * @class MasterPlaylistController
 * @extends videojs.EventTarget
 */

var MasterPlaylistController = (function (_videojs$EventTarget) {
  _inherits(MasterPlaylistController, _videojs$EventTarget);

  function MasterPlaylistController(options) {
    var _this = this;

    _classCallCheck(this, MasterPlaylistController);

    _get(Object.getPrototypeOf(MasterPlaylistController.prototype), 'constructor', this).call(this);

    var url = options.url;
    var withCredentials = options.withCredentials;
    var mode = options.mode;
    var tech = options.tech;
    var bandwidth = options.bandwidth;
    var externHls = options.externHls;
    var useCueTags = options.useCueTags;
    var blacklistDuration = options.blacklistDuration;
    var enableLowInitialPlaylist = options.enableLowInitialPlaylist;

    if (!url) {
      throw new Error('A non-empty playlist URL is required');
    }

    Hls = externHls;

    this.withCredentials = withCredentials;
    this.tech_ = tech;
    this.hls_ = tech.hls;
    this.mode_ = mode;
    this.useCueTags_ = useCueTags;
    this.blacklistDuration = blacklistDuration;
    this.enableLowInitialPlaylist = enableLowInitialPlaylist;
    if (this.useCueTags_) {
      this.cueTagsTrack_ = this.tech_.addTextTrack('metadata', 'ad-cues');
      this.cueTagsTrack_.inBandMetadataTrackDispatchType = '';
    }

    this.requestOptions_ = {
      withCredentials: this.withCredentials,
      timeout: null
    };

    this.mediaTypes_ = (0, _mediaGroups.createMediaTypes)();

    this.mediaSource = new _videoJs2['default'].MediaSource({ mode: mode });

    // load the media source into the player
    this.mediaSource.addEventListener('sourceopen', this.handleSourceOpen_.bind(this));

    this.seekable_ = _videoJs2['default'].createTimeRanges();
    this.hasPlayed_ = function () {
      return false;
    };

    this.syncController_ = new _syncController2['default'](options);
    this.segmentMetadataTrack_ = tech.addRemoteTextTrack({
      kind: 'metadata',
      label: 'segment-metadata'
    }, false).track;

    this.decrypter_ = (0, _webworkify2['default'])(_decrypterWorker2['default']);

    var segmentLoaderSettings = {
      hls: this.hls_,
      mediaSource: this.mediaSource,
      currentTime: this.tech_.currentTime.bind(this.tech_),
      seekable: function seekable() {
        return _this.seekable();
      },
      seeking: function seeking() {
        return _this.tech_.seeking();
      },
      duration: function duration() {
        return _this.mediaSource.duration;
      },
      hasPlayed: function hasPlayed() {
        return _this.hasPlayed_();
      },
      goalBufferLength: function goalBufferLength() {
        return _this.goalBufferLength();
      },
      bandwidth: bandwidth,
      syncController: this.syncController_,
      decrypter: this.decrypter_
    };

    // setup playlist loaders
    this.masterPlaylistLoader_ = new _playlistLoader2['default'](url, this.hls_, this.withCredentials);
    this.setupMasterPlaylistLoaderListeners_();

    // setup segment loaders
    // combined audio/video or just video when alternate audio track is selected
    this.mainSegmentLoader_ = new _segmentLoader2['default'](_videoJs2['default'].mergeOptions(segmentLoaderSettings, {
      segmentMetadataTrack: this.segmentMetadataTrack_,
      loaderType: 'main'
    }), options);

    // alternate audio track
    this.audioSegmentLoader_ = new _segmentLoader2['default'](_videoJs2['default'].mergeOptions(segmentLoaderSettings, {
      loaderType: 'audio'
    }), options);

    this.subtitleSegmentLoader_ = new _vttSegmentLoader2['default'](_videoJs2['default'].mergeOptions(segmentLoaderSettings, {
      loaderType: 'vtt'
    }), options);

    this.setupSegmentLoaderListeners_();

    // Create SegmentLoader stat-getters
    loaderStats.forEach(function (stat) {
      _this[stat + '_'] = sumLoaderStat.bind(_this, stat);
    });

    this.masterPlaylistLoader_.load();
  }

  /**
   * Register event handlers on the master playlist loader. A helper
   * function for construction time.
   *
   * @private
   */

  _createClass(MasterPlaylistController, [{
    key: 'setupMasterPlaylistLoaderListeners_',
    value: function setupMasterPlaylistLoaderListeners_() {
      var _this2 = this;

      this.masterPlaylistLoader_.on('loadedmetadata', function () {
        var media = _this2.masterPlaylistLoader_.media();
        var requestTimeout = _this2.masterPlaylistLoader_.targetDuration * 1.5 * 1000;

        // If we don't have any more available playlists, we don't want to
        // timeout the request.
        if (_this2.masterPlaylistLoader_.isLowestEnabledRendition_()) {
          _this2.requestOptions_.timeout = 0;
        } else {
          _this2.requestOptions_.timeout = requestTimeout;
        }

        // if this isn't a live video and preload permits, start
        // downloading segments
        if (media.endList && _this2.tech_.preload() !== 'none') {
          _this2.mainSegmentLoader_.playlist(media, _this2.requestOptions_);
          _this2.mainSegmentLoader_.load();
        }

        (0, _mediaGroups.setupMediaGroups)({
          segmentLoaders: {
            AUDIO: _this2.audioSegmentLoader_,
            SUBTITLES: _this2.subtitleSegmentLoader_,
            main: _this2.mainSegmentLoader_
          },
          tech: _this2.tech_,
          requestOptions: _this2.requestOptions_,
          masterPlaylistLoader: _this2.masterPlaylistLoader_,
          mode: _this2.mode_,
          hls: _this2.hls_,
          master: _this2.master(),
          mediaTypes: _this2.mediaTypes_,
          blacklistCurrentPlaylist: _this2.blacklistCurrentPlaylist.bind(_this2)
        });

        _this2.triggerPresenceUsage_(_this2.master(), media);

        try {
          _this2.setupSourceBuffers_();
        } catch (e) {
          _videoJs2['default'].log.warn('Failed to create SourceBuffers', e);
          return _this2.mediaSource.endOfStream('decode');
        }
        _this2.setupFirstPlay();

        _this2.trigger('selectedinitialmedia');
      });

      this.masterPlaylistLoader_.on('loadedplaylist', function () {
        var updatedPlaylist = _this2.masterPlaylistLoader_.media();

        if (!updatedPlaylist) {
          var selectedMedia = undefined;

          if (_this2.enableLowInitialPlaylist) {
            selectedMedia = _this2.selectInitialPlaylist();
          }

          if (!selectedMedia) {
            selectedMedia = _this2.selectPlaylist();
          }

          _this2.initialMedia_ = selectedMedia;
          _this2.masterPlaylistLoader_.media(_this2.initialMedia_);
          return;
        }

        if (_this2.useCueTags_) {
          _this2.updateAdCues_(updatedPlaylist);
        }

        // TODO: Create a new event on the PlaylistLoader that signals
        // that the segments have changed in some way and use that to
        // update the SegmentLoader instead of doing it twice here and
        // on `mediachange`
        _this2.mainSegmentLoader_.playlist(updatedPlaylist, _this2.requestOptions_);
        _this2.updateDuration();

        // If the player isn't paused, ensure that the segment loader is running,
        // as it is possible that it was temporarily stopped while waiting for
        // a playlist (e.g., in case the playlist errored and we re-requested it).
        if (!_this2.tech_.paused()) {
          _this2.mainSegmentLoader_.load();
        }

        if (!updatedPlaylist.endList) {
          (function () {
            var addSeekableRange = function addSeekableRange() {
              var seekable = _this2.seekable();

              if (seekable.length !== 0) {
                _this2.mediaSource.addSeekableRange_(seekable.start(0), seekable.end(0));
              }
            };

            if (_this2.duration() !== Infinity) {
              (function () {
                var onDurationchange = function onDurationchange() {
                  if (_this2.duration() === Infinity) {
                    addSeekableRange();
                  } else {
                    _this2.tech_.one('durationchange', onDurationchange);
                  }
                };

                _this2.tech_.one('durationchange', onDurationchange);
              })();
            } else {
              addSeekableRange();
            }
          })();
        }
      });

      this.masterPlaylistLoader_.on('error', function () {
        _this2.blacklistCurrentPlaylist(_this2.masterPlaylistLoader_.error);
      });

      this.masterPlaylistLoader_.on('mediachanging', function () {
        _this2.mainSegmentLoader_.abort();
        _this2.mainSegmentLoader_.pause();
      });

      this.masterPlaylistLoader_.on('mediachange', function () {
        var media = _this2.masterPlaylistLoader_.media();
        var requestTimeout = _this2.masterPlaylistLoader_.targetDuration * 1.5 * 1000;

        // If we don't have any more available playlists, we don't want to
        // timeout the request.
        if (_this2.masterPlaylistLoader_.isLowestEnabledRendition_()) {
          _this2.requestOptions_.timeout = 0;
        } else {
          _this2.requestOptions_.timeout = requestTimeout;
        }

        // TODO: Create a new event on the PlaylistLoader that signals
        // that the segments have changed in some way and use that to
        // update the SegmentLoader instead of doing it twice here and
        // on `loadedplaylist`
        _this2.mainSegmentLoader_.playlist(media, _this2.requestOptions_);
        _this2.mainSegmentLoader_.load();

        _this2.tech_.trigger({
          type: 'mediachange',
          bubbles: true
        });
      });

      this.masterPlaylistLoader_.on('playlistunchanged', function () {
        var updatedPlaylist = _this2.masterPlaylistLoader_.media();
        var playlistOutdated = _this2.stuckAtPlaylistEnd_(updatedPlaylist);

        if (playlistOutdated) {
          // Playlist has stopped updating and we're stuck at its end. Try to
          // blacklist it and switch to another playlist in the hope that that
          // one is updating (and give the player a chance to re-adjust to the
          // safe live point).
          _this2.blacklistCurrentPlaylist({
            message: 'Playlist no longer updating.'
          });
          // useful for monitoring QoS
          _this2.tech_.trigger('playliststuck');
        }
      });

      this.masterPlaylistLoader_.on('renditiondisabled', function () {
        _this2.tech_.trigger({ type: 'usage', name: 'hls-rendition-disabled' });
      });
      this.masterPlaylistLoader_.on('renditionenabled', function () {
        _this2.tech_.trigger({ type: 'usage', name: 'hls-rendition-enabled' });
      });
    }

    /**
     * A helper function for triggering presence usage events once per source
     *
     * @private
     */
  }, {
    key: 'triggerPresenceUsage_',
    value: function triggerPresenceUsage_(master, media) {
      var mediaGroups = master.mediaGroups || {};
      var defaultDemuxed = true;
      var audioGroupKeys = Object.keys(mediaGroups.AUDIO);

      for (var mediaGroup in mediaGroups.AUDIO) {
        for (var label in mediaGroups.AUDIO[mediaGroup]) {
          var properties = mediaGroups.AUDIO[mediaGroup][label];

          if (!properties.uri) {
            defaultDemuxed = false;
          }
        }
      }

      if (defaultDemuxed) {
        this.tech_.trigger({ type: 'usage', name: 'hls-demuxed' });
      }

      if (Object.keys(mediaGroups.SUBTITLES).length) {
        this.tech_.trigger({ type: 'usage', name: 'hls-webvtt' });
      }

      if (Hls.Playlist.isAes(media)) {
        this.tech_.trigger({ type: 'usage', name: 'hls-aes' });
      }

      if (Hls.Playlist.isFmp4(media)) {
        this.tech_.trigger({ type: 'usage', name: 'hls-fmp4' });
      }

      if (audioGroupKeys.length && Object.keys(mediaGroups.AUDIO[audioGroupKeys[0]]).length > 1) {
        this.tech_.trigger({ type: 'usage', name: 'hls-alternate-audio' });
      }

      if (this.useCueTags_) {
        this.tech_.trigger({ type: 'usage', name: 'hls-playlist-cue-tags' });
      }
    }

    /**
     * Register event handlers on the segment loaders. A helper function
     * for construction time.
     *
     * @private
     */
  }, {
    key: 'setupSegmentLoaderListeners_',
    value: function setupSegmentLoaderListeners_() {
      var _this3 = this;

      this.mainSegmentLoader_.on('bandwidthupdate', function () {
        var nextPlaylist = _this3.selectPlaylist();
        var currentPlaylist = _this3.masterPlaylistLoader_.media();
        var buffered = _this3.tech_.buffered();
        var forwardBuffer = buffered.length ? buffered.end(buffered.length - 1) - _this3.tech_.currentTime() : 0;

        var bufferLowWaterLine = _this3.bufferLowWaterLine();

        // If the playlist is live, then we want to not take low water line into account.
        // This is because in LIVE, the player plays 3 segments from the end of the
        // playlist, and if `BUFFER_LOW_WATER_LINE` is greater than the duration available
        // in those segments, a viewer will never experience a rendition upswitch.
        if (!currentPlaylist.endList ||
            // For the same reason as LIVE, we ignore the low water line when the VOD
            // duration is below the max potential low water line
            _this3.duration() < _config2['default'].MAX_BUFFER_LOW_WATER_LINE ||
            // we want to switch down to lower resolutions quickly to continue playback, but
            nextPlaylist.attributes.BANDWIDTH < currentPlaylist.attributes.BANDWIDTH ||
            // ensure we have some buffer before we switch up to prevent us running out of
            // buffer while loading a higher rendition.
            forwardBuffer >= bufferLowWaterLine) {
          _this3.masterPlaylistLoader_.media(nextPlaylist);
        }

        _this3.tech_.trigger('bandwidthupdate');
      });
      this.mainSegmentLoader_.on('progress', function () {
        _this3.trigger('progress');
      });

      this.mainSegmentLoader_.on('error', function () {
        _this3.blacklistCurrentPlaylist(_this3.mainSegmentLoader_.error());
      });

      this.mainSegmentLoader_.on('syncinfoupdate', function () {
        _this3.onSyncInfoUpdate_();
      });

      this.mainSegmentLoader_.on('timestampoffset', function () {
        _this3.tech_.trigger({ type: 'usage', name: 'hls-timestamp-offset' });
      });
      this.audioSegmentLoader_.on('syncinfoupdate', function () {
        _this3.onSyncInfoUpdate_();
      });

      this.mainSegmentLoader_.on('ended', function () {
        _this3.onEndOfStream();
      });

      this.mainSegmentLoader_.on('earlyabort', function () {
        _this3.blacklistCurrentPlaylist({
          message: 'Aborted early because there isn\'t enough bandwidth to complete the ' + 'request without rebuffering.'
        }, ABORT_EARLY_BLACKLIST_SECONDS);
      });

      this.mainSegmentLoader_.on('reseteverything', function () {
        // If playing an MTS stream, a videojs.MediaSource is listening for
        // hls-reset to reset caption parsing state in the transmuxer
        _this3.tech_.trigger('hls-reset');
      });

      this.mainSegmentLoader_.on('segmenttimemapping', function (event) {
        // If playing an MTS stream in html, a videojs.MediaSource is listening for
        // hls-segment-time-mapping to update its internal mapping of stream to display time
        _this3.tech_.trigger({
          type: 'hls-segment-time-mapping',
          mapping: event.mapping
        });
      });

      this.audioSegmentLoader_.on('ended', function () {
        _this3.onEndOfStream();
      });
    }
  }, {
    key: 'mediaSecondsLoaded_',
    value: function mediaSecondsLoaded_() {
      return Math.max(this.audioSegmentLoader_.mediaSecondsLoaded + this.mainSegmentLoader_.mediaSecondsLoaded);
    }

    /**
     * Call load on our SegmentLoaders
     */
  }, {
    key: 'load',
    value: function load() {
      this.mainSegmentLoader_.load();
      if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
        this.audioSegmentLoader_.load();
      }
      if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
        this.subtitleSegmentLoader_.load();
      }
    }

    /**
     * Re-tune playback quality level for the current player
     * conditions. This method may perform destructive actions, like
     * removing already buffered content, to readjust the currently
     * active playlist quickly.
     *
     * @private
     */
  }, {
    key: 'fastQualityChange_',
    value: function fastQualityChange_() {
      var media = this.selectPlaylist();

      if (media !== this.masterPlaylistLoader_.media()) {
        this.masterPlaylistLoader_.media(media);

        this.mainSegmentLoader_.resetLoader();
        // don't need to reset audio as it is reset when media changes
      }
    }

    /**
     * Begin playback.
     */
  }, {
    key: 'play',
    value: function play() {
      if (this.setupFirstPlay()) {
        return;
      }

      if (this.tech_.ended()) {
        this.tech_.setCurrentTime(0);
      }

      if (this.hasPlayed_()) {
        this.load();
      }

      var seekable = this.tech_.seekable();

      // if the viewer has paused and we fell out of the live window,
      // seek forward to the live point
      if (this.tech_.duration() === Infinity) {
        if (this.tech_.currentTime() < seekable.start(0)) {
          return this.tech_.setCurrentTime(seekable.end(seekable.length - 1));
        }
      }
    }

    /**
     * Seek to the latest media position if this is a live video and the
     * player and video are loaded and initialized.
     */
  }, {
    key: 'setupFirstPlay',
    value: function setupFirstPlay() {
      var _this4 = this;

      var media = this.masterPlaylistLoader_.media();

      // Check that everything is ready to begin buffering for the first call to play
      // If 1) there is no active media
      //    2) the player is paused
      //    3) the first play has already been setup
      // then exit early
      if (!media || this.tech_.paused() || this.hasPlayed_()) {
        return false;
      }

      // when the video is a live stream
      if (!media.endList) {
        var _ret3 = (function () {
          var seekable = _this4.seekable();

          if (!seekable.length) {
            // without a seekable range, the player cannot seek to begin buffering at the live
            // point
            return {
              v: false
            };
          }

          if (_videoJs2['default'].browser.IE_VERSION && _this4.mode_ === 'html5' && _this4.tech_.readyState() === 0) {
            // IE11 throws an InvalidStateError if you try to set currentTime while the
            // readyState is 0, so it must be delayed until the tech fires loadedmetadata.
            _this4.tech_.one('loadedmetadata', function () {
              _this4.trigger('firstplay');
              _this4.tech_.setCurrentTime(seekable.end(0));
              _this4.hasPlayed_ = function () {
                return true;
              };
            });

            return {
              v: false
            };
          }

          // trigger firstplay to inform the source handler to ignore the next seek event
          _this4.trigger('firstplay');
          // seek to the live point
          _this4.tech_.setCurrentTime(seekable.end(0));
        })();

        if (typeof _ret3 === 'object') return _ret3.v;
      }

      this.hasPlayed_ = function () {
        return true;
      };
      // we can begin loading now that everything is ready
      this.load();
      return true;
    }

    /**
     * handle the sourceopen event on the MediaSource
     *
     * @private
     */
  }, {
    key: 'handleSourceOpen_',
    value: function handleSourceOpen_() {
      // Only attempt to create the source buffer if none already exist.
      // handleSourceOpen is also called when we are "re-opening" a source buffer
      // after `endOfStream` has been called (in response to a seek for instance)
      try {
        this.setupSourceBuffers_();
      } catch (e) {
        _videoJs2['default'].log.warn('Failed to create Source Buffers', e);
        return this.mediaSource.endOfStream('decode');
      }

      // if autoplay is enabled, begin playback. This is duplicative of
      // code in video.js but is required because play() must be invoked
      // *after* the media source has opened.
      if (this.tech_.autoplay()) {
        this.tech_.play();
      }

      this.trigger('sourceopen');
    }

    /**
     * Calls endOfStream on the media source when all active stream types have called
     * endOfStream
     *
     * @param {string} streamType
     *        Stream type of the segment loader that called endOfStream
     * @private
     */
  }, {
    key: 'onEndOfStream',
    value: function onEndOfStream() {
      var isEndOfStream = this.mainSegmentLoader_.ended_;

      if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
        // if the audio playlist loader exists, then alternate audio is active, so we need
        // to wait for both the main and audio segment loaders to call endOfStream
        isEndOfStream = isEndOfStream && this.audioSegmentLoader_.ended_;
      }

      if (isEndOfStream) {
        this.mediaSource.endOfStream();
      }
    }

    /**
     * Check if a playlist has stopped being updated
     * @param {Object} playlist the media playlist object
     * @return {boolean} whether the playlist has stopped being updated or not
     */
  }, {
    key: 'stuckAtPlaylistEnd_',
    value: function stuckAtPlaylistEnd_(playlist) {
      var seekable = this.seekable();

      if (!seekable.length) {
        // playlist doesn't have enough information to determine whether we are stuck
        return false;
      }

      var expired = this.syncController_.getExpiredTime(playlist, this.mediaSource.duration);

      if (expired === null) {
        return false;
      }

      // does not use the safe live end to calculate playlist end, since we
      // don't want to say we are stuck while there is still content
      var absolutePlaylistEnd = Hls.Playlist.playlistEnd(playlist, expired);
      var currentTime = this.tech_.currentTime();
      var buffered = this.tech_.buffered();

      if (!buffered.length) {
        // return true if the playhead reached the absolute end of the playlist
        return absolutePlaylistEnd - currentTime <= _ranges2['default'].SAFE_TIME_DELTA;
      }
      var bufferedEnd = buffered.end(buffered.length - 1);

      // return true if there is too little buffer left and buffer has reached absolute
      // end of playlist
      return bufferedEnd - currentTime <= _ranges2['default'].SAFE_TIME_DELTA && absolutePlaylistEnd - bufferedEnd <= _ranges2['default'].SAFE_TIME_DELTA;
    }

    /**
     * Blacklists a playlist when an error occurs for a set amount of time
     * making it unavailable for selection by the rendition selection algorithm
     * and then forces a new playlist (rendition) selection.
     *
     * @param {Object=} error an optional error that may include the playlist
     *        to blacklist
     * @param {Number=} blacklistDuration an optional number of seconds to blacklist the
     *        playlist
     */
  }, {
    key: 'blacklistCurrentPlaylist',
    value: function blacklistCurrentPlaylist(error, blacklistDuration) {
      if (error === undefined) error = {};

      var currentPlaylist = undefined;
      var nextPlaylist = undefined;

      // If the `error` was generated by the playlist loader, it will contain
      // the playlist we were trying to load (but failed) and that should be
      // blacklisted instead of the currently selected playlist which is likely
      // out-of-date in this scenario
      currentPlaylist = error.playlist || this.masterPlaylistLoader_.media();

      blacklistDuration = blacklistDuration || error.blacklistDuration || this.blacklistDuration;

      // If there is no current playlist, then an error occurred while we were
      // trying to load the master OR while we were disposing of the tech
      if (!currentPlaylist) {
        this.error = error;

        try {
          return this.mediaSource.endOfStream('network');
        } catch (e) {
          return this.trigger('error');
        }
      }

      var isFinalRendition = this.masterPlaylistLoader_.isFinalRendition_();

      if (isFinalRendition) {
        // Never blacklist this playlist because it is the final rendition
        _videoJs2['default'].log.warn('Problem encountered with the current ' + 'HLS playlist. Trying again since it is the final playlist.');

        this.tech_.trigger('retryplaylist');
        return this.masterPlaylistLoader_.load(isFinalRendition);
      }
      // Blacklist this playlist
      currentPlaylist.excludeUntil = Date.now() + blacklistDuration * 1000;
      this.tech_.trigger('blacklistplaylist');
      this.tech_.trigger({ type: 'usage', name: 'hls-rendition-blacklisted' });

      // Select a new playlist
      nextPlaylist = this.selectPlaylist();
      _videoJs2['default'].log.warn('Problem encountered with the current HLS playlist.' + (error.message ? ' ' + error.message : '') + ' Switching to another playlist.');

      return this.masterPlaylistLoader_.media(nextPlaylist);
    }

    /**
     * Pause all segment loaders
     */
  }, {
    key: 'pauseLoading',
    value: function pauseLoading() {
      this.mainSegmentLoader_.pause();
      if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
        this.audioSegmentLoader_.pause();
      }
      if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
        this.subtitleSegmentLoader_.pause();
      }
    }

    /**
     * set the current time on all segment loaders
     *
     * @param {TimeRange} currentTime the current time to set
     * @return {TimeRange} the current time
     */
  }, {
    key: 'setCurrentTime',
    value: function setCurrentTime(currentTime) {
      var buffered = _ranges2['default'].findRange(this.tech_.buffered(), currentTime);

      if (!(this.masterPlaylistLoader_ && this.masterPlaylistLoader_.media())) {
        // return immediately if the metadata is not ready yet
        return 0;
      }

      // it's clearly an edge-case but don't throw an error if asked to
      // seek within an empty playlist
      if (!this.masterPlaylistLoader_.media().segments) {
        return 0;
      }

      // In flash playback, the segment loaders should be reset on every seek, even
      // in buffer seeks. If the seek location is already buffered, continue buffering as
      // usual
      if (buffered && buffered.length && this.mode_ !== 'flash') {
        return currentTime;
      }

      // cancel outstanding requests so we begin buffering at the new
      // location
      this.mainSegmentLoader_.resetEverything();
      this.mainSegmentLoader_.abort();
      if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
        this.audioSegmentLoader_.resetEverything();
        this.audioSegmentLoader_.abort();
      }
      if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
        this.subtitleSegmentLoader_.resetEverything();
        this.subtitleSegmentLoader_.abort();
      }

      // start segment loader loading in case they are paused
      this.load();
    }

    /**
     * get the current duration
     *
     * @return {TimeRange} the duration
     */
  }, {
    key: 'duration',
    value: function duration() {
      if (!this.masterPlaylistLoader_) {
        return 0;
      }

      if (this.mediaSource) {
        return this.mediaSource.duration;
      }

      return Hls.Playlist.duration(this.masterPlaylistLoader_.media());
    }

    /**
     * check the seekable range
     *
     * @return {TimeRange} the seekable range
     */
  }, {
    key: 'seekable',
    value: function seekable() {
      return this.seekable_;
    }
  }, {
    key: 'onSyncInfoUpdate_',
    value: function onSyncInfoUpdate_() {
      var mainSeekable = undefined;
      var audioSeekable = undefined;

      if (!this.masterPlaylistLoader_) {
        return;
      }

      var media = this.masterPlaylistLoader_.media();

      if (!media) {
        return;
      }

      var expired = this.syncController_.getExpiredTime(media, this.mediaSource.duration);

      if (expired === null) {
        // not enough information to update seekable
        return;
      }

      mainSeekable = Hls.Playlist.seekable(media, expired);

      if (mainSeekable.length === 0) {
        return;
      }

      if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
        media = this.mediaTypes_.AUDIO.activePlaylistLoader.media();
        expired = this.syncController_.getExpiredTime(media, this.mediaSource.duration);

        if (expired === null) {
          return;
        }

        audioSeekable = Hls.Playlist.seekable(media, expired);

        if (audioSeekable.length === 0) {
          return;
        }
      }

      if (!audioSeekable) {
        // seekable has been calculated based on buffering video data so it
        // can be returned directly
        this.seekable_ = mainSeekable;
      } else if (audioSeekable.start(0) > mainSeekable.end(0) || mainSeekable.start(0) > audioSeekable.end(0)) {
        // seekables are pretty far off, rely on main
        this.seekable_ = mainSeekable;
      } else {
        this.seekable_ = _videoJs2['default'].createTimeRanges([[audioSeekable.start(0) > mainSeekable.start(0) ? audioSeekable.start(0) : mainSeekable.start(0), audioSeekable.end(0) < mainSeekable.end(0) ? audioSeekable.end(0) : mainSeekable.end(0)]]);
      }

      this.tech_.trigger('seekablechanged');
    }

    /**
     * Update the player duration
     */
  }, {
    key: 'updateDuration',
    value: function updateDuration() {
      var _this5 = this;

      var oldDuration = this.mediaSource.duration;
      var newDuration = Hls.Playlist.duration(this.masterPlaylistLoader_.media());
      var buffered = this.tech_.buffered();
      var setDuration = function setDuration() {
        _this5.mediaSource.duration = newDuration;
        _this5.tech_.trigger('durationchange');

        _this5.mediaSource.removeEventListener('sourceopen', setDuration);
      };

      if (buffered.length > 0) {
        newDuration = Math.max(newDuration, buffered.end(buffered.length - 1));
      }

      // if the duration has changed, invalidate the cached value
      if (oldDuration !== newDuration) {
        // update the duration
        if (this.mediaSource.readyState !== 'open') {
          this.mediaSource.addEventListener('sourceopen', setDuration);
        } else {
          setDuration();
        }
      }
    }

    /**
     * dispose of the MasterPlaylistController and everything
     * that it controls
     */
  }, {
    key: 'dispose',
    value: function dispose() {
      var _this6 = this;

      this.decrypter_.terminate();
      this.masterPlaylistLoader_.dispose();
      this.mainSegmentLoader_.dispose();

      ['AUDIO', 'SUBTITLES'].forEach(function (type) {
        var groups = _this6.mediaTypes_[type].groups;

        for (var id in groups) {
          groups[id].forEach(function (group) {
            if (group.playlistLoader) {
              group.playlistLoader.dispose();
            }
          });
        }
      });

      this.audioSegmentLoader_.dispose();
      this.subtitleSegmentLoader_.dispose();
    }

    /**
     * return the master playlist object if we have one
     *
     * @return {Object} the master playlist object that we parsed
     */
  }, {
    key: 'master',
    value: function master() {
      return this.masterPlaylistLoader_.master;
    }

    /**
     * return the currently selected playlist
     *
     * @return {Object} the currently selected playlist object that we parsed
     */
  }, {
    key: 'media',
    value: function media() {
      // playlist loader will not return media if it has not been fully loaded
      return this.masterPlaylistLoader_.media() || this.initialMedia_;
    }

    /**
     * setup our internal source buffers on our segment Loaders
     *
     * @private
     */
  }, {
    key: 'setupSourceBuffers_',
    value: function setupSourceBuffers_() {
      var media = this.masterPlaylistLoader_.media();
      var mimeTypes = undefined;

      // wait until a media playlist is available and the Media Source is
      // attached
      if (!media || this.mediaSource.readyState !== 'open') {
        return;
      }

      mimeTypes = mimeTypesForPlaylist_(this.masterPlaylistLoader_.master, media);
      if (mimeTypes.length < 1) {
        this.error = 'No compatible SourceBuffer configuration for the variant stream:' + media.resolvedUri;
        return this.mediaSource.endOfStream('decode');
      }
      this.mainSegmentLoader_.mimeType(mimeTypes[0]);
      if (mimeTypes[1]) {
        this.audioSegmentLoader_.mimeType(mimeTypes[1]);
      }

      // exclude any incompatible variant streams from future playlist
      // selection
      this.excludeIncompatibleVariants_(media);
    }

    /**
     * Blacklist playlists that are known to be codec or
     * stream-incompatible with the SourceBuffer configuration. For
     * instance, Media Source Extensions would cause the video element to
     * stall waiting for video data if you switched from a variant with
     * video and audio to an audio-only one.
     *
     * @param {Object} media a media playlist compatible with the current
     * set of SourceBuffers. Variants in the current master playlist that
     * do not appear to have compatible codec or stream configurations
     * will be excluded from the default playlist selection algorithm
     * indefinitely.
     * @private
     */
  }, {
    key: 'excludeIncompatibleVariants_',
    value: function excludeIncompatibleVariants_(media) {
      var master = this.masterPlaylistLoader_.master;
      var codecCount = 2;
      var videoCodec = null;
      var codecs = undefined;

      if (media.attributes.CODECS) {
        codecs = (0, _utilCodecsJs.parseCodecs)(media.attributes.CODECS);
        videoCodec = codecs.videoCodec;
        codecCount = codecs.codecCount;
      }
      master.playlists.forEach(function (variant) {
        var variantCodecs = {
          codecCount: 2,
          videoCodec: null
        };

        if (variant.attributes.CODECS) {
          var codecString = variant.attributes.CODECS;

          variantCodecs = (0, _utilCodecsJs.parseCodecs)(codecString);

          if (window.MediaSource && window.MediaSource.isTypeSupported && !window.MediaSource.isTypeSupported('video/mp4; codecs="' + mapLegacyAvcCodecs_(codecString) + '"')) {
            variant.excludeUntil = Infinity;
          }
        }

        // if the streams differ in the presence or absence of audio or
        // video, they are incompatible
        if (variantCodecs.codecCount !== codecCount) {
          variant.excludeUntil = Infinity;
        }

        // if h.264 is specified on the current playlist, some flavor of
        // it must be specified on all compatible variants
        if (variantCodecs.videoCodec !== videoCodec) {
          variant.excludeUntil = Infinity;
        }
      });
    }
  }, {
    key: 'updateAdCues_',
    value: function updateAdCues_(media) {
      var offset = 0;
      var seekable = this.seekable();

      if (seekable.length) {
        offset = seekable.start(0);
      }

      _adCueTags2['default'].updateAdCues(media, this.cueTagsTrack_, offset);
    }

    /**
     * Calculates the desired forward buffer length based on current time
     *
     * @return {Number} Desired forward buffer length in seconds
     */
  }, {
    key: 'goalBufferLength',
    value: function goalBufferLength() {
      var currentTime = this.tech_.currentTime();
      var initial = _config2['default'].GOAL_BUFFER_LENGTH;
      var rate = _config2['default'].GOAL_BUFFER_LENGTH_RATE;
      var max = Math.max(initial, _config2['default'].MAX_GOAL_BUFFER_LENGTH);

      return Math.min(initial + currentTime * rate, max);
    }
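
    // Illustrative arithmetic (not part of the library): with the default
    // config (initial 30, rate 1, max 60), the goal buffer is 30s at
    // currentTime 0, 50s at currentTime 20, and clamps at 60s from
    // currentTime 30 onward. bufferLowWaterLine() below follows the same
    // min(initial + t * rate, max) shape with its own constants.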
1625
1626 /**
1627 * Calculates the desired buffer low water line based on current time
1628 *
1629 * @return {Number} Desired buffer low water line in seconds
1630 */
1631 }, {
1632 key: 'bufferLowWaterLine',
1633 value: function bufferLowWaterLine() {
1634 var currentTime = this.tech_.currentTime();
1635 var initial = _config2['default'].BUFFER_LOW_WATER_LINE;
1636 var rate = _config2['default'].BUFFER_LOW_WATER_LINE_RATE;
1637 var max = Math.max(initial, _config2['default'].MAX_BUFFER_LOW_WATER_LINE);
1638
1639 return Math.min(initial + currentTime * rate, max);
1640 }
1641 }]);
1642
1643 return MasterPlaylistController;
1644})(_videoJs2['default'].EventTarget);
1645
1646exports.MasterPlaylistController = MasterPlaylistController;
1647}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
1648},{"./ad-cue-tags":1,"./config":3,"./decrypter-worker":4,"./media-groups":6,"./playlist-loader":9,"./ranges":12,"./segment-loader":16,"./sync-controller":18,"./util/codecs.js":19,"./vtt-segment-loader":20,"videojs-contrib-media-sources/es5/codec-utils":65,"webworkify":76}],6:[function(require,module,exports){
1649(function (global){
1650'use strict';
1651
1652Object.defineProperty(exports, '__esModule', {
1653 value: true
1654});
1655
1656function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
1657
1658var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
1659
1660var _videoJs2 = _interopRequireDefault(_videoJs);
1661
1662var _playlistLoader = require('./playlist-loader');
1663
1664var _playlistLoader2 = _interopRequireDefault(_playlistLoader);
1665
1666var noop = function noop() {};
1667
1668/**
1669 * Convert the properties of an HLS track into an audioTrackKind.
1670 *
1671 * @private
1672 */
1673var audioTrackKind_ = function audioTrackKind_(properties) {
1674 var kind = properties['default'] ? 'main' : 'alternative';
1675
1676 if (properties.characteristics && properties.characteristics.indexOf('public.accessibility.describes-video') >= 0) {
1677 kind = 'main-desc';
1678 }
1679
1680 return kind;
1681};
1682
1683/**
1684 * Pause provided segment loader and playlist loader if active
1685 *
1686 * @param {SegmentLoader} segmentLoader
1687 * SegmentLoader to pause
1688 * @param {Object} mediaType
1689 * Active media type
1690 * @function stopLoaders
1691 */
1692var stopLoaders = function stopLoaders(segmentLoader, mediaType) {
1693 segmentLoader.abort();
1694 segmentLoader.pause();
1695
1696 if (mediaType && mediaType.activePlaylistLoader) {
1697 mediaType.activePlaylistLoader.pause();
1698 mediaType.activePlaylistLoader = null;
1699 }
1700};
1701
1702exports.stopLoaders = stopLoaders;
1703/**
1704 * Start loading the provided playlist loader (its segment loader is started by the playlist loader's events)
1705 *
1706 * @param {PlaylistLoader} playlistLoader
1707 * PlaylistLoader to start loading
1708 * @param {Object} mediaType
1709 * Active media type
1710 * @function startLoaders
1711 */
1712var startLoaders = function startLoaders(playlistLoader, mediaType) {
1713 // Segment loader will be started after `loadedmetadata` or `loadedplaylist` from the
1714 // playlist loader
1715 mediaType.activePlaylistLoader = playlistLoader;
1716 playlistLoader.load();
1717};
1718
1719exports.startLoaders = startLoaders;
1720/**
1721 * Returns a function to be called when the media group changes. It performs a
1722 * non-destructive (preserves the buffer) resync of the SegmentLoader. This is because a
1723 * change of group is merely a rendition switch of the same content at another encoding,
1724 * rather than a change of content, such as switching audio from English to Spanish.
1725 *
1726 * @param {String} type
1727 * MediaGroup type
1728 * @param {Object} settings
1729 * Object containing required information for media groups
1730 * @return {Function}
1731 * Handler for a non-destructive resync of SegmentLoader when the active media
1732 * group changes.
1733 * @function onGroupChanged
1734 */
1735var onGroupChanged = function onGroupChanged(type, settings) {
1736 return function () {
1737 var _settings$segmentLoaders = settings.segmentLoaders;
1738 var segmentLoader = _settings$segmentLoaders[type];
1739 var mainSegmentLoader = _settings$segmentLoaders.main;
1740 var mediaType = settings.mediaTypes[type];
1741
1742 var activeTrack = mediaType.activeTrack();
1743 var activeGroup = mediaType.activeGroup(activeTrack);
1744 var previousActiveLoader = mediaType.activePlaylistLoader;
1745
1746 stopLoaders(segmentLoader, mediaType);
1747
1748 if (!activeGroup) {
1749 // there is no group active
1750 return;
1751 }
1752
1753 if (!activeGroup.playlistLoader) {
1754 if (previousActiveLoader) {
1755 // The previous group had a playlist loader but the new active group does not
1756 // this means we are switching from demuxed to muxed audio. In this case we want to
1757 // do a destructive reset of the main segment loader and not restart the audio
1758 // loaders.
1759 mainSegmentLoader.resetEverything();
1760 }
1761 return;
1762 }
1763
1764 // Non-destructive resync
1765 segmentLoader.resyncLoader();
1766
1767 startLoaders(activeGroup.playlistLoader, mediaType);
1768 };
1769};
1770
1771exports.onGroupChanged = onGroupChanged;
1772/**
1773 * Returns a function to be called when the media track changes. It performs a
1774 * destructive reset of the SegmentLoader to ensure we start loading as close to
1775 * currentTime as possible.
1776 *
1777 * @param {String} type
1778 * MediaGroup type
1779 * @param {Object} settings
1780 * Object containing required information for media groups
1781 * @return {Function}
1782 * Handler for a destructive reset of SegmentLoader when the active media
1783 * track changes.
1784 * @function onTrackChanged
1785 */
1786var onTrackChanged = function onTrackChanged(type, settings) {
1787 return function () {
1788 var _settings$segmentLoaders2 = settings.segmentLoaders;
1789 var segmentLoader = _settings$segmentLoaders2[type];
1790 var mainSegmentLoader = _settings$segmentLoaders2.main;
1791 var mediaType = settings.mediaTypes[type];
1792
1793 var activeTrack = mediaType.activeTrack();
1794 var activeGroup = mediaType.activeGroup(activeTrack);
1795 var previousActiveLoader = mediaType.activePlaylistLoader;
1796
1797 stopLoaders(segmentLoader, mediaType);
1798
1799 if (!activeGroup) {
1800 // there is no group active so we do not want to restart loaders
1801 return;
1802 }
1803
1804 if (!activeGroup.playlistLoader) {
1805 // when switching from demuxed audio/video to muxed audio/video (noted by no playlist
1806 // loader for the audio group), we want to do a destructive reset of the main segment
1807 // loader and not restart the audio loaders
1808 mainSegmentLoader.resetEverything();
1809 return;
1810 }
1811
1812 if (previousActiveLoader === activeGroup.playlistLoader) {
1813 // Nothing has actually changed. This can happen because track change events can fire
1814 // multiple times for a "single" change. One for enabling the new active track, and
1815 // one for disabling the track that was active
1816 startLoaders(activeGroup.playlistLoader, mediaType);
1817 return;
1818 }
1819
1820 if (segmentLoader.track) {
1821 // For WebVTT, set the new text track in the segmentloader
1822 segmentLoader.track(activeTrack);
1823 }
1824
1825 // destructive reset
1826 segmentLoader.resetEverything();
1827
1828 startLoaders(activeGroup.playlistLoader, mediaType);
1829 };
1830};
1831
1832exports.onTrackChanged = onTrackChanged;
1833var onError = {
1834 /**
1835 * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
1836 * an error.
1837 *
1838 * @param {String} type
1839 * MediaGroup type
1840 * @param {Object} settings
1841 * Object containing required information for media groups
1842 * @return {Function}
1843 * Error handler. Logs warning (or error if the playlist is blacklisted) to
1844 * console and switches back to default audio track.
1845 * @function onError.AUDIO
1846 */
1847 AUDIO: function AUDIO(type, settings) {
1848 return function () {
1849 var segmentLoader = settings.segmentLoaders[type];
1850 var mediaType = settings.mediaTypes[type];
1851 var blacklistCurrentPlaylist = settings.blacklistCurrentPlaylist;
1852
1853 stopLoaders(segmentLoader, mediaType);
1854
1855 // switch back to default audio track
1856 var activeTrack = mediaType.activeTrack();
1857 var activeGroup = mediaType.activeGroup();
1858 var id = (activeGroup.filter(function (group) {
1859 return group['default'];
1860 })[0] || activeGroup[0]).id;
1861 var defaultTrack = mediaType.tracks[id];
1862
1863 if (activeTrack === defaultTrack) {
1864 // Default track encountered an error. All we can do now is blacklist the current
1865 // rendition and hope another will switch audio groups
1866 blacklistCurrentPlaylist({
1867 message: 'Problem encountered loading the default audio track.'
1868 });
1869 return;
1870 }
1871
1872 _videoJs2['default'].log.warn('Problem encountered loading the alternate audio track. ' + 'Switching back to default.');
1873
1874 for (var trackId in mediaType.tracks) {
1875 mediaType.tracks[trackId].enabled = mediaType.tracks[trackId] === defaultTrack;
1876 }
1877
1878 mediaType.onTrackChanged();
1879 };
1880 },
1881 /**
1882 * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
1883 * an error.
1884 *
1885 * @param {String} type
1886 * MediaGroup type
1887 * @param {Object} settings
1888 * Object containing required information for media groups
1889 * @return {Function}
1890 * Error handler. Logs warning to console and disables the active subtitle track
1891 * @function onError.SUBTITLES
1892 */
1893 SUBTITLES: function SUBTITLES(type, settings) {
1894 return function () {
1895 var segmentLoader = settings.segmentLoaders[type];
1896 var mediaType = settings.mediaTypes[type];
1897
1898 _videoJs2['default'].log.warn('Problem encountered loading the subtitle track. ' + 'Disabling subtitle track.');
1899
1900 stopLoaders(segmentLoader, mediaType);
1901
1902 var track = mediaType.activeTrack();
1903
1904 if (track) {
1905 track.mode = 'disabled';
1906 }
1907
1908 mediaType.onTrackChanged();
1909 };
1910 }
1911};
1912
1913exports.onError = onError;
1914var setupListeners = {
1915 /**
1916 * Setup event listeners for audio playlist loader
1917 *
1918 * @param {String} type
1919 * MediaGroup type
1920 * @param {PlaylistLoader|null} playlistLoader
1921 * PlaylistLoader to register listeners on
1922 * @param {Object} settings
1923 * Object containing required information for media groups
1924 * @function setupListeners.AUDIO
1925 */
1926 AUDIO: function AUDIO(type, playlistLoader, settings) {
1927 if (!playlistLoader) {
1928 // no playlist loader means audio will be muxed with the video
1929 return;
1930 }
1931
1932 var tech = settings.tech;
1933 var requestOptions = settings.requestOptions;
1934 var segmentLoader = settings.segmentLoaders[type];
1935
1936 playlistLoader.on('loadedmetadata', function () {
1937 var media = playlistLoader.media();
1938
1939 segmentLoader.playlist(media, requestOptions);
1940
1941 // if the video is already playing, or if this isn't a live video and preload
1942 // permits, start downloading segments
1943 if (!tech.paused() || (media.endList && tech.preload() !== 'none')) {
1944 segmentLoader.load();
1945 }
1946 });
1947
1948 playlistLoader.on('loadedplaylist', function () {
1949 segmentLoader.playlist(playlistLoader.media(), requestOptions);
1950
1951 // If the player isn't paused, ensure that the segment loader is running
1952 if (!tech.paused()) {
1953 segmentLoader.load();
1954 }
1955 });
1956
1957 playlistLoader.on('error', onError[type](type, settings));
1958 },
1959 /**
1960 * Setup event listeners for subtitle playlist loader
1961 *
1962 * @param {String} type
1963 * MediaGroup type
1964 * @param {PlaylistLoader|null} playlistLoader
1965 * PlaylistLoader to register listeners on
1966 * @param {Object} settings
1967 * Object containing required information for media groups
1968 * @function setupListeners.SUBTITLES
1969 */
1970 SUBTITLES: function SUBTITLES(type, playlistLoader, settings) {
1971 var tech = settings.tech;
1972 var requestOptions = settings.requestOptions;
1973 var segmentLoader = settings.segmentLoaders[type];
1974 var mediaType = settings.mediaTypes[type];
1975
1976 playlistLoader.on('loadedmetadata', function () {
1977 var media = playlistLoader.media();
1978
1979 segmentLoader.playlist(media, requestOptions);
1980 segmentLoader.track(mediaType.activeTrack());
1981
1982 // if the video is already playing, or if this isn't a live video and preload
1983 // permits, start downloading segments
1984 if (!tech.paused() || (media.endList && tech.preload() !== 'none')) {
1985 segmentLoader.load();
1986 }
1987 });
1988
1989 playlistLoader.on('loadedplaylist', function () {
1990 segmentLoader.playlist(playlistLoader.media(), requestOptions);
1991
1992 // If the player isn't paused, ensure that the segment loader is running
1993 if (!tech.paused()) {
1994 segmentLoader.load();
1995 }
1996 });
1997
1998 playlistLoader.on('error', onError[type](type, settings));
1999 }
2000};
2001
2002exports.setupListeners = setupListeners;
2003var initialize = {
2004 /**
2005 * Setup PlaylistLoaders and AudioTracks for the audio groups
2006 *
2007 * @param {String} type
2008 * MediaGroup type
2009 * @param {Object} settings
2010 * Object containing required information for media groups
2011 * @function initialize.AUDIO
2012 */
2013 'AUDIO': function AUDIO(type, settings) {
2014 var mode = settings.mode;
2015 var hls = settings.hls;
2016 var segmentLoader = settings.segmentLoaders[type];
2017 var withCredentials = settings.requestOptions.withCredentials;
2018 var mediaGroups = settings.master.mediaGroups;
2019 var _settings$mediaTypes$type = settings.mediaTypes[type];
2020 var groups = _settings$mediaTypes$type.groups;
2021 var tracks = _settings$mediaTypes$type.tracks;
2022
2023 // force a default if we have none or we are not
2024 // in html5 mode (the only mode to support more than one
2025 // audio track)
2026 if (!mediaGroups[type] || Object.keys(mediaGroups[type]).length === 0 || mode !== 'html5') {
2027 mediaGroups[type] = { main: { 'default': { 'default': true } } };
2028 }
2029
2030 for (var groupId in mediaGroups[type]) {
2031 if (!groups[groupId]) {
2032 groups[groupId] = [];
2033 }
2034
2035 for (var variantLabel in mediaGroups[type][groupId]) {
2036 var properties = mediaGroups[type][groupId][variantLabel];
2037 var playlistLoader = undefined;
2038
2039 if (properties.resolvedUri) {
2040 playlistLoader = new _playlistLoader2['default'](properties.resolvedUri, hls, withCredentials);
2041 } else {
2042 // no resolvedUri means the audio is muxed with the video when using this
2043 // audio track
2044 playlistLoader = null;
2045 }
2046
2047 properties = _videoJs2['default'].mergeOptions({ id: variantLabel, playlistLoader: playlistLoader }, properties);
2048
2049 setupListeners[type](type, properties.playlistLoader, settings);
2050
2051 groups[groupId].push(properties);
2052
2053 if (typeof tracks[variantLabel] === 'undefined') {
2054 var track = new _videoJs2['default'].AudioTrack({
2055 id: variantLabel,
2056 kind: audioTrackKind_(properties),
2057 enabled: false,
2058 language: properties.language,
2059 'default': properties['default'],
2060 label: variantLabel
2061 });
2062
2063 tracks[variantLabel] = track;
2064 }
2065 }
2066 }
2067
2068 // setup single error event handler for the segment loader
2069 segmentLoader.on('error', onError[type](type, settings));
2070 },
2071 /**
2072 * Setup PlaylistLoaders and TextTracks for the subtitle groups
2073 *
2074 * @param {String} type
2075 * MediaGroup type
2076 * @param {Object} settings
2077 * Object containing required information for media groups
2078 * @function initialize.SUBTITLES
2079 */
2080 'SUBTITLES': function SUBTITLES(type, settings) {
2081 var tech = settings.tech;
2082 var hls = settings.hls;
2083 var segmentLoader = settings.segmentLoaders[type];
2084 var withCredentials = settings.requestOptions.withCredentials;
2085 var mediaGroups = settings.master.mediaGroups;
2086 var _settings$mediaTypes$type2 = settings.mediaTypes[type];
2087 var groups = _settings$mediaTypes$type2.groups;
2088 var tracks = _settings$mediaTypes$type2.tracks;
2089
2090 for (var groupId in mediaGroups[type]) {
2091 if (!groups[groupId]) {
2092 groups[groupId] = [];
2093 }
2094
2095 for (var variantLabel in mediaGroups[type][groupId]) {
2096 if (mediaGroups[type][groupId][variantLabel].forced) {
2097 // Subtitle playlists with the forced attribute are not selectable in Safari.
2098 // According to Apple's HLS Authoring Specification:
2099 // If content has forced subtitles and regular subtitles in a given language,
2100 // the regular subtitles track in that language MUST contain both the forced
2101 // subtitles and the regular subtitles for that language.
2102 // Because of this requirement, and because Safari does not add forced subtitles,
2103 // forced subtitles are skipped here to maintain a consistent experience across
2104 // all platforms
2105 continue;
2106 }
2107
2108 var properties = mediaGroups[type][groupId][variantLabel];
2109
2110 properties = _videoJs2['default'].mergeOptions({
2111 id: variantLabel,
2112 playlistLoader: new _playlistLoader2['default'](properties.resolvedUri, hls, withCredentials)
2113 }, properties);
2114
2115 setupListeners[type](type, properties.playlistLoader, settings);
2116
2117 groups[groupId].push(properties);
2118
2119 if (typeof tracks[variantLabel] === 'undefined') {
2120 var track = tech.addRemoteTextTrack({
2121 id: variantLabel,
2122 kind: 'subtitles',
2123 enabled: false,
2124 language: properties.language,
2125 label: variantLabel
2126 }, false).track;
2127
2128 tracks[variantLabel] = track;
2129 }
2130 }
2131 }
2132
2133 // setup single error event handler for the segment loader
2134 segmentLoader.on('error', onError[type](type, settings));
2135 },
2136 /**
2137 * Setup TextTracks for the closed-caption groups
2138 *
2139 * @param {String} type
2140 * MediaGroup type
2141 * @param {Object} settings
2142 * Object containing required information for media groups
2143 * @function initialize['CLOSED-CAPTIONS']
2144 */
2145 'CLOSED-CAPTIONS': function CLOSEDCAPTIONS(type, settings) {
2146 var tech = settings.tech;
2147 var mediaGroups = settings.master.mediaGroups;
2148 var _settings$mediaTypes$type3 = settings.mediaTypes[type];
2149 var groups = _settings$mediaTypes$type3.groups;
2150 var tracks = _settings$mediaTypes$type3.tracks;
2151
2152 for (var groupId in mediaGroups[type]) {
2153 if (!groups[groupId]) {
2154 groups[groupId] = [];
2155 }
2156
2157 for (var variantLabel in mediaGroups[type][groupId]) {
2158 var properties = mediaGroups[type][groupId][variantLabel];
2159
2160 // We only support CEA-608 captions for now, so ignore anything that
2161 // doesn't use a CCx INSTREAM-ID
2162 if (!properties.instreamId.match(/CC\d/)) {
2163 continue;
2164 }
2165
2166 // No PlaylistLoader is required for Closed-Captions because the captions are
2167 // embedded within the video stream
2168 groups[groupId].push(_videoJs2['default'].mergeOptions({ id: variantLabel }, properties));
2169
2170 if (typeof tracks[variantLabel] === 'undefined') {
2171 var track = tech.addRemoteTextTrack({
2172 id: properties.instreamId,
2173 kind: 'captions',
2174 enabled: false,
2175 language: properties.language,
2176 label: variantLabel
2177 }, false).track;
2178
2179 tracks[variantLabel] = track;
2180 }
2181 }
2182 }
2183 }
2184};
2185
2186exports.initialize = initialize;
2187/**
2188 * Returns a function used to get the active group of the provided type
2189 *
2190 * @param {String} type
2191 * MediaGroup type
2192 * @param {Object} settings
2193 * Object containing required information for media groups
2194 * @return {Function}
2195 * Function that returns the active media group for the provided type. Takes an
2196 * optional parameter {TextTrack} track. If no track is provided, a list of all
2197 * variants in the group is returned; otherwise, the variant corresponding to the
2198 * provided track is returned.
2199 * @function activeGroup
2200 */
2201var activeGroup = function activeGroup(type, settings) {
2202 return function (track) {
2203 var masterPlaylistLoader = settings.masterPlaylistLoader;
2204 var groups = settings.mediaTypes[type].groups;
2205
2206 var media = masterPlaylistLoader.media();
2207
2208 if (!media) {
2209 return null;
2210 }
2211
2212 var variants = null;
2213
2214 if (media.attributes[type]) {
2215 variants = groups[media.attributes[type]];
2216 }
2217
2218 variants = variants || groups.main;
2219
2220 if (typeof track === 'undefined') {
2221 return variants;
2222 }
2223
2224 if (track === null) {
2225 // An active track was specified so a corresponding group is expected. track === null
2226 // means no track is currently active so there is no corresponding group
2227 return null;
2228 }
2229
2230 return variants.filter(function (props) {
2231 return props.id === track.id;
2232 })[0] || null;
2233 };
2234};
2235
2236exports.activeGroup = activeGroup;
2237var activeTrack = {
2238 /**
2239 * Returns a function used to get the active track of type provided
2240 *
2241 * @param {String} type
2242 * MediaGroup type
2243 * @param {Object} settings
2244 * Object containing required information for media groups
2245 * @return {Function}
2246 * Function that returns the active media track for the provided type. Returns
2247 * null if no track is active
2248 * @function activeTrack.AUDIO
2249 */
2250 AUDIO: function AUDIO(type, settings) {
2251 return function () {
2252 var tracks = settings.mediaTypes[type].tracks;
2253
2254 for (var id in tracks) {
2255 if (tracks[id].enabled) {
2256 return tracks[id];
2257 }
2258 }
2259
2260 return null;
2261 };
2262 },
2263 /**
2264 * Returns a function used to get the active track of type provided
2265 *
2266 * @param {String} type
2267 * MediaGroup type
2268 * @param {Object} settings
2269 * Object containing required information for media groups
2270 * @return {Function}
2271 * Function that returns the active media track for the provided type. Returns
2272 * null if no track is active
2273 * @function activeTrack.SUBTITLES
2274 */
2275 SUBTITLES: function SUBTITLES(type, settings) {
2276 return function () {
2277 var tracks = settings.mediaTypes[type].tracks;
2278
2279 for (var id in tracks) {
2280 if (tracks[id].mode === 'showing') {
2281 return tracks[id];
2282 }
2283 }
2284
2285 return null;
2286 };
2287 }
2288};
2289
2290exports.activeTrack = activeTrack;
2291/**
2292 * Setup PlaylistLoaders and Tracks for media groups (Audio, Subtitles,
2293 * Closed-Captions) specified in the master manifest.
2294 *
2295 * @param {Object} settings
2296 * Object containing required information for setting up the media groups
2297 * @param {SegmentLoader} settings.segmentLoaders.AUDIO
2298 * Audio segment loader
2299 * @param {SegmentLoader} settings.segmentLoaders.SUBTITLES
2300 * Subtitle segment loader
2301 * @param {SegmentLoader} settings.segmentLoaders.main
2302 * Main segment loader
2303 * @param {Tech} settings.tech
2304 * The tech of the player
2305 * @param {Object} settings.requestOptions
2306 * XHR request options used by the segment loaders
2307 * @param {PlaylistLoader} settings.masterPlaylistLoader
2308 * PlaylistLoader for the master source
2309 * @param {String} settings.mode
2310 * Mode of the hls source handler. Can be 'auto', 'html5', or 'flash'
2311 * @param {HlsHandler} settings.hls
2312 * HLS SourceHandler
2313 * @param {Object} settings.master
2314 * The parsed master manifest
2315 * @param {Object} settings.mediaTypes
2316 * Object to store the loaders, tracks, and utility methods for each media type
2317 * @param {Function} settings.blacklistCurrentPlaylist
2318 * Blacklists the current rendition and forces a rendition switch.
2319 * @function setupMediaGroups
2320 */
2321var setupMediaGroups = function setupMediaGroups(settings) {
2322 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
2323 initialize[type](type, settings);
2324 });
2325
2326 var mediaTypes = settings.mediaTypes;
2327 var masterPlaylistLoader = settings.masterPlaylistLoader;
2328 var tech = settings.tech;
2329 var hls = settings.hls;
2330
2331 // setup active group and track getters and change event handlers
2332 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
2333 mediaTypes[type].activeGroup = activeGroup(type, settings);
2334 mediaTypes[type].activeTrack = activeTrack[type](type, settings);
2335 mediaTypes[type].onGroupChanged = onGroupChanged(type, settings);
2336 mediaTypes[type].onTrackChanged = onTrackChanged(type, settings);
2337 });
2338
2339 // DO NOT enable the default subtitle or caption track.
2340 // DO enable the default audio track
2341 var audioGroup = mediaTypes.AUDIO.activeGroup();
2342 var groupId = (audioGroup.filter(function (group) {
2343 return group['default'];
2344 })[0] || audioGroup[0]).id;
2345
2346 mediaTypes.AUDIO.tracks[groupId].enabled = true;
2347 mediaTypes.AUDIO.onTrackChanged();
2348
2349 masterPlaylistLoader.on('mediachange', function () {
2350 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
2351 return mediaTypes[type].onGroupChanged();
2352 });
2353 });
2354
2355 // custom audio track change event handler for usage event
2356 var onAudioTrackChanged = function onAudioTrackChanged() {
2357 mediaTypes.AUDIO.onTrackChanged();
2358 tech.trigger({ type: 'usage', name: 'hls-audio-change' });
2359 };
2360
2361 tech.audioTracks().addEventListener('change', onAudioTrackChanged);
2362 tech.remoteTextTracks().addEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
2363
2364 hls.on('dispose', function () {
2365 tech.audioTracks().removeEventListener('change', onAudioTrackChanged);
2366 tech.remoteTextTracks().removeEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
2367 });
2368
2369 // clear existing audio tracks and add the ones we just created
2370 tech.clearTracks('audio');
2371
2372 for (var id in mediaTypes.AUDIO.tracks) {
2373 tech.audioTracks().addTrack(mediaTypes.AUDIO.tracks[id]);
2374 }
2375};
2376
2377exports.setupMediaGroups = setupMediaGroups;
2378/**
2379 * Creates skeleton object used to store the loaders, tracks, and utility methods for each
2380 * media type
2381 *
2382 * @return {Object}
2383 * Object to store the loaders, tracks, and utility methods for each media type
2384 * @function createMediaTypes
2385 */
2386var createMediaTypes = function createMediaTypes() {
2387 var mediaTypes = {};
2388
2389 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
2390 mediaTypes[type] = {
2391 groups: {},
2392 tracks: {},
2393 activePlaylistLoader: null,
2394 activeGroup: noop,
2395 activeTrack: noop,
2396 onGroupChanged: noop,
2397 onTrackChanged: noop
2398 };
2399 });
2400
2401 return mediaTypes;
2402};
2403exports.createMediaTypes = createMediaTypes;
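// Usage sketch (all values are placeholders; see the setupMediaGroups JSDoc
// above for the full settings contract):
//
//   var mediaTypes = createMediaTypes();
//
//   setupMediaGroups({
//     segmentLoaders: { AUDIO: audioLoader, SUBTITLES: vttLoader, main: mainLoader },
//     tech: tech,
//     requestOptions: { withCredentials: false, timeout: 45000 },
//     masterPlaylistLoader: masterPlaylistLoader,
//     mode: 'html5',
//     hls: hls,
//     master: masterPlaylistLoader.master,
//     mediaTypes: mediaTypes,
//     blacklistCurrentPlaylist: blacklistCurrentPlaylist
//   });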
2404}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
2405},{"./playlist-loader":9}],7:[function(require,module,exports){
2406(function (global){
2407'use strict';
2408
2409Object.defineProperty(exports, '__esModule', {
2410 value: true
2411});
2412
2413function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
2414
2415var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
2416
2417var _videoJs2 = _interopRequireDefault(_videoJs);
2418
2419var _binUtils = require('./bin-utils');
2420
2421var REQUEST_ERRORS = {
2422 FAILURE: 2,
2423 TIMEOUT: -101,
2424 ABORTED: -102
2425};
2426
2427exports.REQUEST_ERRORS = REQUEST_ERRORS;
2428/**
2429 * Turns segment byterange into a string suitable for use in
2430 * HTTP Range requests
2431 *
2432 * @param {Object} byterange - an object with two values defining the start and end
2433 * of a byte-range
2434 */
2435var byterangeStr = function byterangeStr(byterange) {
2436 var byterangeStart = undefined;
2437 var byterangeEnd = undefined;
2438
2439 // `byterangeEnd` is one less than `offset + length` because the HTTP range
2440 // header uses inclusive ranges
2441 byterangeEnd = byterange.offset + byterange.length - 1;
2442 byterangeStart = byterange.offset;
2443 return 'bytes=' + byterangeStart + '-' + byterangeEnd;
2444};
2445
2446/**
2447 * Defines headers for use in the xhr request for a particular segment.
2448 *
2449 * @param {Object} segment - a simplified copy of the segmentInfo object
2450 * from SegmentLoader
2451 */
2452var segmentXhrHeaders = function segmentXhrHeaders(segment) {
2453 var headers = {};
2454
2455 if (segment.byterange) {
2456 headers.Range = byterangeStr(segment.byterange);
2457 }
2458 return headers;
2459};
2460
2461/**
2462 * Abort all requests
2463 *
2464 * @param {Object} activeXhrs - an object that tracks all XHR requests
2465 */
2466var abortAll = function abortAll(activeXhrs) {
2467 activeXhrs.forEach(function (xhr) {
2468 xhr.abort();
2469 });
2470};
2471
2472/**
2473 * Gather important bandwidth stats once a request has completed
2474 *
2475 * @param {Object} request - the XHR request from which to gather stats
2476 */
2477var getRequestStats = function getRequestStats(request) {
2478 return {
2479 bandwidth: request.bandwidth,
2480 bytesReceived: request.bytesReceived || 0,
2481 roundTripTime: request.roundTripTime || 0
2482 };
2483};
2484
2485/**
2486 * If possible, gather bandwidth stats as a request is in
2487 * progress
2488 *
2489 * @param {Event} progressEvent - an event object from an XHR's progress event
2490 */
2491var getProgressStats = function getProgressStats(progressEvent) {
2492 var request = progressEvent.target;
2493 var roundTripTime = Date.now() - request.requestTime;
2494 var stats = {
2495 bandwidth: Infinity,
2496 bytesReceived: 0,
2497 roundTripTime: roundTripTime || 0
2498 };
2499
2500 stats.bytesReceived = progressEvent.loaded;
2501 // This can result in Infinity if stats.roundTripTime is 0, but that is ok
2502 // because we should only use bandwidth stats on progress to determine when to
2503 // abort a request early due to insufficient bandwidth
2504 stats.bandwidth = Math.floor(stats.bytesReceived / stats.roundTripTime * 8 * 1000);
2505
2506 return stats;
2507};
2508
2509/**
2510 * Handle all error conditions in one place and return an object
2511 * with all the information
2512 *
2513 * @param {Error|null} error - if non-null, signals an error occurred with the XHR
2514 * @param {Object} request - the XHR request that possibly generated the error
2515 */
2516var handleErrors = function handleErrors(error, request) {
2517 if (request.timedout) {
2518 return {
2519 status: request.status,
2520 message: 'HLS request timed-out at URL: ' + request.uri,
2521 code: REQUEST_ERRORS.TIMEOUT,
2522 xhr: request
2523 };
2524 }
2525
2526 if (request.aborted) {
2527 return {
2528 status: request.status,
2529 message: 'HLS request aborted at URL: ' + request.uri,
2530 code: REQUEST_ERRORS.ABORTED,
2531 xhr: request
2532 };
2533 }
2534
2535 if (error) {
2536 return {
2537 status: request.status,
2538 message: 'HLS request errored at URL: ' + request.uri,
2539 code: REQUEST_ERRORS.FAILURE,
2540 xhr: request
2541 };
2542 }
2543
2544 return null;
2545};
2546
2547/**
2548 * Handle responses for key data and convert the key data to the correct format
2549 * for the decryption step later
2550 *
2551 * @param {Object} segment - a simplified copy of the segmentInfo object
2552 * from SegmentLoader
2553 * @param {Function} finishProcessingFn - a callback to execute to continue processing
2554 * this request
2555 */
2556var handleKeyResponse = function handleKeyResponse(segment, finishProcessingFn) {
2557 return function (error, request) {
2558 var response = request.response;
2559 var errorObj = handleErrors(error, request);
2560
2561 if (errorObj) {
2562 return finishProcessingFn(errorObj, segment);
2563 }
2564
2565 if (response.byteLength !== 16) {
2566 return finishProcessingFn({
2567 status: request.status,
2568 message: 'Invalid HLS key at URL: ' + request.uri,
2569 code: REQUEST_ERRORS.FAILURE,
2570 xhr: request
2571 }, segment);
2572 }
2573
2574 var view = new DataView(response);
2575
2576 segment.key.bytes = new Uint32Array([view.getUint32(0), view.getUint32(4), view.getUint32(8), view.getUint32(12)]);
2577 return finishProcessingFn(null, segment);
2578 };
2579};
2580
2581/**
2582 * Handle init-segment responses
2583 *
2584 * @param {Object} segment - a simplified copy of the segmentInfo object
2585 * from SegmentLoader
2586 * @param {Function} finishProcessingFn - a callback to execute to continue processing
2587 * this request
2588 */
2589var handleInitSegmentResponse = function handleInitSegmentResponse(segment, finishProcessingFn) {
2590 return function (error, request) {
2591 var response = request.response;
2592 var errorObj = handleErrors(error, request);
2593
2594 if (errorObj) {
2595 return finishProcessingFn(errorObj, segment);
2596 }
2597
2598 // stop processing if received empty content
2599 if (response.byteLength === 0) {
2600 return finishProcessingFn({
2601 status: request.status,
2602 message: 'Empty HLS segment content at URL: ' + request.uri,
2603 code: REQUEST_ERRORS.FAILURE,
2604 xhr: request
2605 }, segment);
2606 }
2607
2608 segment.map.bytes = new Uint8Array(request.response);
2609 return finishProcessingFn(null, segment);
2610 };
2611};
2612
2613/**
2614 * Response handler for segment requests, being sure to set the correct
2615 * property depending on whether the segment is encrypted or not.
2616 * Also records and keeps track of stats that are used for ABR purposes.
2617 *
2618 * @param {Object} segment - a simplified copy of the segmentInfo object
2619 * from SegmentLoader
2620 * @param {Function} finishProcessingFn - a callback to execute to continue processing
2621 * this request
2622 */
2623var handleSegmentResponse = function handleSegmentResponse(segment, finishProcessingFn) {
2624 return function (error, request) {
2625 var response = request.response;
2626 var errorObj = handleErrors(error, request);
2627
2628 if (errorObj) {
2629 return finishProcessingFn(errorObj, segment);
2630 }
2631
2632 // stop processing if received empty content
2633 if (response.byteLength === 0) {
2634 return finishProcessingFn({
2635 status: request.status,
2636 message: 'Empty HLS segment content at URL: ' + request.uri,
2637 code: REQUEST_ERRORS.FAILURE,
2638 xhr: request
2639 }, segment);
2640 }
2641
2642 segment.stats = getRequestStats(request);
2643
2644 if (segment.key) {
2645 segment.encryptedBytes = new Uint8Array(request.response);
2646 } else {
2647 segment.bytes = new Uint8Array(request.response);
2648 }
2649
2650 return finishProcessingFn(null, segment);
2651 };
2652};
2653
2654/**
2655 * Decrypt the segment via the decryption web worker
2656 *
2657 * @param {WebWorker} decrypter - a WebWorker interface to AES-128 decryption routines
2658 * @param {Object} segment - a simplified copy of the segmentInfo object
2659 * from SegmentLoader
2660 * @param {Function} doneFn - a callback that is executed after decryption has completed
2661 */
2662var decryptSegment = function decryptSegment(decrypter, segment, doneFn) {
2663 var decryptionHandler = function decryptionHandler(event) {
2664 if (event.data.source === segment.requestId) {
2665 decrypter.removeEventListener('message', decryptionHandler);
2666 var decrypted = event.data.decrypted;
2667
2668 segment.bytes = new Uint8Array(decrypted.bytes, decrypted.byteOffset, decrypted.byteLength);
2669 return doneFn(null, segment);
2670 }
2671 };
2672
2673 decrypter.addEventListener('message', decryptionHandler);
2674
2675 // this is an encrypted segment
2676 // incrementally decrypt the segment
2677 decrypter.postMessage((0, _binUtils.createTransferableMessage)({
2678 source: segment.requestId,
2679 encrypted: segment.encryptedBytes,
2680 key: segment.key.bytes,
2681 iv: segment.key.iv
2682 }), [segment.encryptedBytes.buffer, segment.key.bytes.buffer]);
2683};
2684
2685/**
2686 * The purpose of this function is to get the most pertinent error from the
2687 * array of errors.
2688 * For instance, if a timeout and two aborts occur, the aborts were
2689 * likely triggered by the timeout, so return that error object.
2690 */
2691var getMostImportantError = function getMostImportantError(errors) {
2692 return errors.reduce(function (prev, err) {
2693 return err.code > prev.code ? err : prev;
2694 });
2695};
2696
2697/**
2698 * This function waits for all XHRs to finish (with either success or failure)
2699 * before continuing processing via its callback. The function gathers errors
2700 * from each request into a single errors array so that the error status for
2701 * each request can be examined later.
2702 *
2703 * @param {Object} activeXhrs - an object that tracks all XHR requests
2704 * @param {WebWorker} decrypter - a WebWorker interface to AES-128 decryption routines
2705 * @param {Function} doneFn - a callback that is executed after all resources have been
2706 * downloaded and any decryption completed
2707 */
2708var waitForCompletion = function waitForCompletion(activeXhrs, decrypter, doneFn) {
2709 var errors = [];
2710 var count = 0;
2711
2712 return function (error, segment) {
2713 if (error) {
2714 // If there are errors, we have to abort any outstanding requests
2715 abortAll(activeXhrs);
2716 errors.push(error);
2717 }
2718 count += 1;
2719
2720 if (count === activeXhrs.length) {
2721 // Keep track of when *all* of the requests have completed
2722 segment.endOfAllRequests = Date.now();
2723
2724 if (errors.length > 0) {
2725 var worstError = getMostImportantError(errors);
2726
2727 return doneFn(worstError, segment);
2728 }
2729 if (segment.encryptedBytes) {
2730 return decryptSegment(decrypter, segment, doneFn);
2731 }
2732 // Otherwise, everything is ready, so just continue
2733 return doneFn(null, segment);
2734 }
2735 };
2736};
2737
2738/**
2739 * Simple progress event callback handler that gathers some stats before
2740 * executing a provided callback with the `segment` object
2741 *
2742 * @param {Object} segment - a simplified copy of the segmentInfo object
2743 * from SegmentLoader
2744 * @param {Function} progressFn - a callback that is executed each time a progress event
2745 * is received
2746 * @param {Event} event - the progress event object from XMLHttpRequest
2747 */
2748var handleProgress = function handleProgress(segment, progressFn) {
2749 return function (event) {
2750 segment.stats = _videoJs2['default'].mergeOptions(segment.stats, getProgressStats(event));
2751
2752 // record the time that we receive the first byte of data
2753 if (!segment.stats.firstBytesReceivedAt && segment.stats.bytesReceived) {
2754 segment.stats.firstBytesReceivedAt = Date.now();
2755 }
2756
2757 return progressFn(event, segment);
2758 };
2759};
2760
2761/**
2762 * Loads all resources and does any processing necessary for a media segment
2763 *
2764 * Features:
2765 * decrypts the media-segment if it has a key uri and an iv
2766 * aborts *all* requests if *any* one request fails
2767 *
2768 * The segment object, at minimum, has the following format:
2769 * {
2770 * resolvedUri: String,
2771 * [byterange]: {
2772 * offset: Number,
2773 * length: Number
2774 * },
2775 * [key]: {
2776 * resolvedUri: String
2777 * [byterange]: {
2778 * offset: Number,
2779 * length: Number
2780 * },
2781 * iv: {
2782 * bytes: Uint32Array
2783 * }
2784 * },
2785 * [map]: {
2786 * resolvedUri: String,
2787 * [byterange]: {
2788 * offset: Number,
2789 * length: Number
2790 * },
2791 * [bytes]: Uint8Array
2792 * }
2793 * }
2794 * ...where [name] denotes optional properties
2795 *
2796 * @param {Function} xhr - an instance of the xhr wrapper in xhr.js
2797 * @param {Object} xhrOptions - the base options to provide to all xhr requests
2798 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128
2799 * decryption routines
2800 * @param {Object} segment - a simplified copy of the segmentInfo object
2801 * from SegmentLoader
2802 * @param {Function} progressFn - a callback that receives progress events from the main
2803 * segment's xhr request
2804 * @param {Function} doneFn - a callback that is executed only once all requests have
2805 * succeeded or failed
2806 * @returns {Function} a function that, when invoked, immediately aborts all
2807 * outstanding requests
2808 */
2809var mediaSegmentRequest = function mediaSegmentRequest(xhr, xhrOptions, decryptionWorker, segment, progressFn, doneFn) {
2810 var activeXhrs = [];
2811 var finishProcessingFn = waitForCompletion(activeXhrs, decryptionWorker, doneFn);
2812
2813 // optionally, request the decryption key
2814 if (segment.key) {
2815 var keyRequestOptions = _videoJs2['default'].mergeOptions(xhrOptions, {
2816 uri: segment.key.resolvedUri,
2817 responseType: 'arraybuffer'
2818 });
2819 var keyRequestCallback = handleKeyResponse(segment, finishProcessingFn);
2820 var keyXhr = xhr(keyRequestOptions, keyRequestCallback);
2821
2822 activeXhrs.push(keyXhr);
2823 }
2824
2825 // optionally, request the associated media init segment
2826 if (segment.map && !segment.map.bytes) {
2827 var initSegmentOptions = _videoJs2['default'].mergeOptions(xhrOptions, {
2828 uri: segment.map.resolvedUri,
2829 responseType: 'arraybuffer',
2830 headers: segmentXhrHeaders(segment.map)
2831 });
2832 var initSegmentRequestCallback = handleInitSegmentResponse(segment, finishProcessingFn);
2833 var initSegmentXhr = xhr(initSegmentOptions, initSegmentRequestCallback);
2834
2835 activeXhrs.push(initSegmentXhr);
2836 }
2837
2838 var segmentRequestOptions = _videoJs2['default'].mergeOptions(xhrOptions, {
2839 uri: segment.resolvedUri,
2840 responseType: 'arraybuffer',
2841 headers: segmentXhrHeaders(segment)
2842 });
2843 var segmentRequestCallback = handleSegmentResponse(segment, finishProcessingFn);
2844 var segmentXhr = xhr(segmentRequestOptions, segmentRequestCallback);
2845
2846 segmentXhr.addEventListener('progress', handleProgress(segment, progressFn));
2847 activeXhrs.push(segmentXhr);
2848
2849 return function () {
2850 return abortAll(activeXhrs);
2851 };
2852};
2853exports.mediaSegmentRequest = mediaSegmentRequest;
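// Usage sketch (hypothetical wiring; `hlsXhr` stands in for the xhr wrapper
// from xhr.js and `decrypter` for a webworkify decryption worker):
//
//   var abortAllRequests = mediaSegmentRequest(
//     hlsXhr,
//     { timeout: 45000 },
//     decrypter,
//     { resolvedUri: 'https://example.com/segment0.ts', requestId: 1 },
//     function (event, segment) { /* sample bandwidth from progress events */ },
//     function (error, segment) { /* segment.bytes is a Uint8Array on success */ }
//   );
//
//   // Invoking the returned function aborts every outstanding request:
//   abortAllRequests();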
2854}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
2855},{"./bin-utils":2}],8:[function(require,module,exports){
2856(function (global){
2857/**
2858 * @file playback-watcher.js
2859 *
2860 * Playback starts, and now my watch begins. It shall not end until my death. I shall
2861 * take no wait, hold no uncleared timeouts, father no bad seeks. I shall wear no crowns
2862 * and win no glory. I shall live and die at my post. I am the corrector of the underflow.
2863 * I am the watcher of gaps. I am the shield that guards the realms of seekable. I pledge
2864 * my life and honor to the Playback Watch, for this Player and all the Players to come.
2865 */
2866
2867'use strict';
2868
2869Object.defineProperty(exports, '__esModule', {
2870 value: true
2871});
2872
2873var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
2874
2875function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
2876
2877function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
2878
2879var _globalWindow = require('global/window');
2880
2881var _globalWindow2 = _interopRequireDefault(_globalWindow);
2882
2883var _ranges = require('./ranges');
2884
2885var _ranges2 = _interopRequireDefault(_ranges);
2886
2887var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
2888
2889var _videoJs2 = _interopRequireDefault(_videoJs);
2890
2891// Set of events that reset the playback-watcher time check logic and clear the timeout
2892var timerCancelEvents = ['seeking', 'seeked', 'pause', 'playing', 'error'];
2893
2894/**
2895 * @class PlaybackWatcher
2896 */
2897
2898var PlaybackWatcher = (function () {
2899 /**
2900 * Represents a PlaybackWatcher object.
2901 * @constructor
2902 * @param {object} options an object that includes the tech and settings
2903 */
2904
2905 function PlaybackWatcher(options) {
2906 var _this = this;
2907
2908 _classCallCheck(this, PlaybackWatcher);
2909
2910 this.tech_ = options.tech;
2911 this.seekable = options.seekable;
2912
2913 this.consecutiveUpdates = 0;
2914 this.lastRecordedTime = null;
2915 this.timer_ = null;
2916 this.checkCurrentTimeTimeout_ = null;
2917
2918 if (options.debug) {
2919 this.logger_ = _videoJs2['default'].log.bind(_videoJs2['default'], 'playback-watcher ->');
2920 }
2921 this.logger_('initialize');
2922
2923 var canPlayHandler = function canPlayHandler() {
2924 return _this.monitorCurrentTime_();
2925 };
2926 var waitingHandler = function waitingHandler() {
2927 return _this.techWaiting_();
2928 };
2929 var cancelTimerHandler = function cancelTimerHandler() {
2930 return _this.cancelTimer_();
2931 };
2932 var fixesBadSeeksHandler = function fixesBadSeeksHandler() {
2933 return _this.fixesBadSeeks_();
2934 };
2935
2936 this.tech_.on('seekablechanged', fixesBadSeeksHandler);
2937 this.tech_.on('waiting', waitingHandler);
2938 this.tech_.on(timerCancelEvents, cancelTimerHandler);
2939 this.tech_.on('canplay', canPlayHandler);
2940
2941 // Define the dispose function to clean up our events
2942 this.dispose = function () {
2943 _this.logger_('dispose');
2944 _this.tech_.off('seekablechanged', fixesBadSeeksHandler);
2945 _this.tech_.off('waiting', waitingHandler);
2946 _this.tech_.off(timerCancelEvents, cancelTimerHandler);
2947 _this.tech_.off('canplay', canPlayHandler);
2948 if (_this.checkCurrentTimeTimeout_) {
2949 _globalWindow2['default'].clearTimeout(_this.checkCurrentTimeTimeout_);
2950 }
2951 _this.cancelTimer_();
2952 };
2953 }
2954
2955 /**
2956 * Periodically check current time to see if playback stopped
2957 *
2958 * @private
2959 */
2960
2961 _createClass(PlaybackWatcher, [{
2962 key: 'monitorCurrentTime_',
2963 value: function monitorCurrentTime_() {
2964 this.checkCurrentTime_();
2965
2966 if (this.checkCurrentTimeTimeout_) {
2967 _globalWindow2['default'].clearTimeout(this.checkCurrentTimeTimeout_);
2968 }
2969
2970 // 42 = 24 fps // 250 is what Webkit uses // FF uses 15
2971 this.checkCurrentTimeTimeout_ = _globalWindow2['default'].setTimeout(this.monitorCurrentTime_.bind(this), 250);
2972 }
2973
2974 /**
2975 * The purpose of this function is to emulate the "waiting" event on
2976 * browsers that do not emit it when they are waiting for more
2977 * data to continue playback
2978 *
2979 * @private
2980 */
2981 }, {
2982 key: 'checkCurrentTime_',
2983 value: function checkCurrentTime_() {
2984 if (this.tech_.seeking() && this.fixesBadSeeks_()) {
2985 this.consecutiveUpdates = 0;
2986 this.lastRecordedTime = this.tech_.currentTime();
2987 return;
2988 }
2989
2990 if (this.tech_.paused() || this.tech_.seeking()) {
2991 return;
2992 }
2993
2994 var currentTime = this.tech_.currentTime();
2995 var buffered = this.tech_.buffered();
2996
2997 if (this.lastRecordedTime === currentTime && (!buffered.length || currentTime + _ranges2['default'].SAFE_TIME_DELTA >= buffered.end(buffered.length - 1))) {
2998 // If current time is at the end of the final buffered region, then any playback
2999 // stall is most likely caused by buffering in a low bandwidth environment. The tech
3000 // should fire a `waiting` event in this scenario, but browsers and techs are
3001 // inconsistent (e.g. the Flash tech does not fire a `waiting` event when the end
3002 // of the buffer is reached after it has fallen off the live window). Calling
3003 // `techWaiting_` here allows us to simulate responding to a native `waiting` event
3004 // when the tech fails to emit one.
3005 return this.techWaiting_();
3006 }
3007
3008 if (this.consecutiveUpdates >= 5 && currentTime === this.lastRecordedTime) {
3009 this.consecutiveUpdates++;
3010 this.waiting_();
3011 } else if (currentTime === this.lastRecordedTime) {
3012 this.consecutiveUpdates++;
3013 } else {
3014 this.consecutiveUpdates = 0;
3015 this.lastRecordedTime = currentTime;
3016 }
3017 }
3018
3019 /**
3020 * Cancels any pending timers and resets the 'timeupdate' mechanism
3021 * designed to detect that we are stalled
3022 *
3023 * @private
3024 */
3025 }, {
3026 key: 'cancelTimer_',
3027 value: function cancelTimer_() {
3028 this.consecutiveUpdates = 0;
3029
3030 if (this.timer_) {
3031 this.logger_('cancelTimer_');
3032 clearTimeout(this.timer_);
3033 }
3034
3035 this.timer_ = null;
3036 }
3037
3038 /**
3039 * Fixes situations where there's a bad seek
3040 *
3041 * @return {Boolean} whether an action was taken to fix the seek
3042 * @private
3043 */
3044 }, {
3045 key: 'fixesBadSeeks_',
3046 value: function fixesBadSeeks_() {
3047 var seeking = this.tech_.seeking();
3048 var seekable = this.seekable();
3049 var currentTime = this.tech_.currentTime();
3050 var seekTo = undefined;
3051
3052 if (seeking && this.afterSeekableWindow_(seekable, currentTime)) {
3053 var seekableEnd = seekable.end(seekable.length - 1);
3054
3055 // sync to live point (if VOD, our seekable was updated and we're simply adjusting)
3056 seekTo = seekableEnd;
3057 }
3058
3059 if (seeking && this.beforeSeekableWindow_(seekable, currentTime)) {
3060 var seekableStart = seekable.start(0);
3061
3062 // sync to the beginning of the live window
3063 // provide a buffer of .1 seconds to handle rounding/imprecise numbers
3064 seekTo = seekableStart + _ranges2['default'].SAFE_TIME_DELTA;
3065 }
3066
3067 if (typeof seekTo !== 'undefined') {
3068 this.logger_('Trying to seek outside of seekable at time ' + currentTime + ' with ' + ('seekable range ' + _ranges2['default'].printableRange(seekable) + '. Seeking to ') + (seekTo + '.'));
3069
3070 this.tech_.setCurrentTime(seekTo);
3071 return true;
3072 }
3073
3074 return false;
3075 }
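// Illustrative scenario: given a live seekable window of [10, 60], a seek to
// 70 falls after the window and is pulled back to 60 (the seekable end),
// while a seek to 5 falls before it and is nudged to 10 + SAFE_TIME_DELTA.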
3076
3077 /**
3078 * Handler for situations when we determine the player is waiting.
3079 *
3080 * @private
3081 */
3082 }, {
3083 key: 'waiting_',
3084 value: function waiting_() {
3085 if (this.techWaiting_()) {
3086 return;
3087 }
3088
3089 // All tech waiting checks failed. Use last resort correction
3090 var currentTime = this.tech_.currentTime();
3091 var buffered = this.tech_.buffered();
3092 var currentRange = _ranges2['default'].findRange(buffered, currentTime);
3093
3094 // Sometimes the player can stall for unknown reasons within a contiguous buffered
3095 // region with no indication that anything is amiss (seen in Firefox). Seeking to
3096 // currentTime is usually enough to kickstart the player. This checks that the player
3097 // is currently within a buffered region before attempting a corrective seek.
3098 // Chrome does not appear to continue `timeupdate` events after a `waiting` event
3099 // until there is ~ 3 seconds of forward buffer available. PlaybackWatcher should also
3100 // make sure there is ~3 seconds of forward buffer before taking any corrective action
3101 // to avoid triggering an `unknownwaiting` event when the network is slow.
3102 if (currentRange.length && currentTime + 3 <= currentRange.end(0)) {
3103 this.cancelTimer_();
3104 this.tech_.setCurrentTime(currentTime);
3105
3106 this.logger_('Stopped at ' + currentTime + ' while inside a buffered region ' + ('[' + currentRange.start(0) + ' -> ' + currentRange.end(0) + ']. Attempting to resume ') + 'playback by seeking to the current time.');
3107
3108 // unknown waiting corrections may be useful for monitoring QoS
3109 this.tech_.trigger({ type: 'usage', name: 'hls-unknown-waiting' });
3110 return;
3111 }
3112 }
3113
3114 /**
3115 * Handler for situations when the tech fires a `waiting` event
3116 *
3117 * @return {Boolean}
3118 * True if an action (or none) was needed to correct the waiting. False if no
3119 * checks passed
3120 * @private
3121 */
3122 }, {
3123 key: 'techWaiting_',
3124 value: function techWaiting_() {
3125 var seekable = this.seekable();
3126 var currentTime = this.tech_.currentTime();
3127
3128 if (this.tech_.seeking() && this.fixesBadSeeks_()) {
3129 // Tech is seeking or bad seek fixed, no action needed
3130 return true;
3131 }
3132
3133 if (this.tech_.seeking() || this.timer_ !== null) {
3134 // Tech is seeking or already waiting on another action, no action needed
3135 return true;
3136 }
3137
3138 if (this.beforeSeekableWindow_(seekable, currentTime)) {
3139 var livePoint = seekable.end(seekable.length - 1);
3140
3141 this.logger_('Fell out of live window at time ' + currentTime + '. Seeking to ' + ('live point (seekable end) ' + livePoint));
3142 this.cancelTimer_();
3143 this.tech_.setCurrentTime(livePoint);
3144
3145 // live window resyncs may be useful for monitoring QoS
3146 this.tech_.trigger({ type: 'usage', name: 'hls-live-resync' });
3147 return true;
3148 }
3149
3150 var buffered = this.tech_.buffered();
3151 var nextRange = _ranges2['default'].findNextRange(buffered, currentTime);
3152
3153 if (this.videoUnderflow_(nextRange, buffered, currentTime)) {
3154 // Even though the video underflowed and was stuck in a gap, the audio overplayed
3155 // the gap, leading currentTime into a buffered range. Seeking to currentTime
3156 // allows the video to catch up to the audio position without losing any audio
3157 // (only suffering ~3 seconds of frozen video and a pause in audio playback).
3158 this.cancelTimer_();
3159 this.tech_.setCurrentTime(currentTime);
3160
3161 // video underflow may be useful for monitoring QoS
3162 this.tech_.trigger({ type: 'usage', name: 'hls-video-underflow' });
3163 return true;
3164 }
3165
3166 // check for gap
3167 if (nextRange.length > 0) {
3168 var difference = nextRange.start(0) - currentTime;
3169
3170 this.logger_('Stopped at ' + currentTime + ', setting timer for ' + difference + ', seeking ' + ('to ' + nextRange.start(0)));
3171
3172 this.timer_ = setTimeout(this.skipTheGap_.bind(this), difference * 1000, currentTime);
3173 return true;
3174 }
3175
3176 // All checks failed. Returning false to indicate failure to correct waiting
3177 return false;
3178 }
3179 }, {
3180 key: 'afterSeekableWindow_',
3181 value: function afterSeekableWindow_(seekable, currentTime) {
3182 if (!seekable.length) {
3183 // we can't make a solid case if there's no seekable, default to false
3184 return false;
3185 }
3186
3187 if (currentTime > seekable.end(seekable.length - 1) + _ranges2['default'].SAFE_TIME_DELTA) {
3188 return true;
3189 }
3190
3191 return false;
3192 }
3193 }, {
3194 key: 'beforeSeekableWindow_',
3195 value: function beforeSeekableWindow_(seekable, currentTime) {
3196 if (seekable.length &&
3197 // can't fall before 0 and 0 seekable start identifies VOD stream
3198 seekable.start(0) > 0 && currentTime < seekable.start(0) - _ranges2['default'].SAFE_TIME_DELTA) {
3199 return true;
3200 }
3201
3202 return false;
3203 }
3204 }, {
3205 key: 'videoUnderflow_',
3206 value: function videoUnderflow_(nextRange, buffered, currentTime) {
3207 if (nextRange.length === 0) {
3208 // Even if there is no available next range, there is still a possibility we are
3209 // stuck in a gap due to video underflow.
3210 var gap = this.gapFromVideoUnderflow_(buffered, currentTime);
3211
3212 if (gap) {
3213 this.logger_('Encountered a gap in video from ' + gap.start + ' to ' + gap.end + '. ' + ('Seeking to current time ' + currentTime));
3214
3215 return true;
3216 }
3217 }
3218
3219 return false;
3220 }
3221
3222 /**
3223 * Timer callback. If playback still has not proceeded, then we seek
3224 * to the start of the next buffered region.
3225 *
3226 * @private
3227 */
3228 }, {
3229 key: 'skipTheGap_',
3230 value: function skipTheGap_(scheduledCurrentTime) {
3231 var buffered = this.tech_.buffered();
3232 var currentTime = this.tech_.currentTime();
3233 var nextRange = _ranges2['default'].findNextRange(buffered, currentTime);
3234
3235 this.cancelTimer_();
3236
3237 if (nextRange.length === 0 || currentTime !== scheduledCurrentTime) {
3238 return;
3239 }
3240
3241 this.logger_('skipTheGap_:', 'currentTime:', currentTime, 'scheduled currentTime:', scheduledCurrentTime, 'nextRange start:', nextRange.start(0));
3242
3243 // only seek if we still have not played
3244 this.tech_.setCurrentTime(nextRange.start(0) + _ranges2['default'].TIME_FUDGE_FACTOR);
3245
3246 this.tech_.trigger({ type: 'usage', name: 'hls-gap-skip' });
3247 }
3248 }, {
3249 key: 'gapFromVideoUnderflow_',
3250 value: function gapFromVideoUnderflow_(buffered, currentTime) {
3251 // At least in Chrome, if there is a gap in the video buffer, the audio will continue
3252 // playing for ~3 seconds after the video gap starts. This is done to account for
3253 // video buffer underflow/underrun (note that this is not done when there is audio
3254 // buffer underflow/underrun -- in that case the video will stop as soon as it
3255 // encounters the gap, as audio stalls are more noticeable/jarring to a user than
3256 // video stalls). The player's time will reflect the playthrough of audio, so the
3257 // time will appear as if we are in a buffered region, even if we are stuck in a
3258 // "gap."
3259 //
3260 // Example:
3261 // video buffer: 0 => 10.1, 10.2 => 20
3262 // audio buffer: 0 => 20
3263 // overall buffer: 0 => 10.1, 10.2 => 20
3264 // current time: 13
3265 //
3266 // Chrome's video froze at 10 seconds, where the video buffer encountered the gap,
3267 // however, the audio continued playing until it reached ~3 seconds past the gap
3268 // (13 seconds), at which point it stops as well. Since current time is past the
3269 // gap, findNextRange will return no ranges.
3270 //
3271 // To check for this issue, we see if there is a gap that starts somewhere within
3272 // a 3 second range (3 seconds +/- 1 second) back from our current time.
3273 var gaps = _ranges2['default'].findGaps(buffered);
3274
3275 for (var i = 0; i < gaps.length; i++) {
3276 var start = gaps.start(i);
3277 var end = gaps.end(i);
3278
3279 // gap starts no more than 4 seconds back, but at least 2 seconds back
3280 if (currentTime - start < 4 && currentTime - start > 2) {
3281 return {
3282 start: start,
3283 end: end
3284 };
3285 }
3286 }
3287
3288 return null;
3289 }
3290
3291 /**
3292 * A debugging logger noop that is set to console.log only if debugging
3293 * is enabled globally
3294 *
3295 * @private
3296 */
3297 }, {
3298 key: 'logger_',
3299 value: function logger_() {}
3300 }]);
3301
3302 return PlaybackWatcher;
3303})();
3304
3305exports['default'] = PlaybackWatcher;
3306module.exports = exports['default'];
3307}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
3308},{"./ranges":12,"global/window":32}],9:[function(require,module,exports){
3309(function (global){
3310/**
3311 * @file playlist-loader.js
3312 *
3313 * A state machine that manages the loading, caching, and updating of
3314 * M3U8 playlists.
3315 *
3316 */
3317'use strict';
3318
3319Object.defineProperty(exports, '__esModule', {
3320 value: true
3321});
3322
3323var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
3324
3325var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
3326
3327function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
3328
3329function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
3330
3331function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
3332
3333var _resolveUrl = require('./resolve-url');
3334
3335var _resolveUrl2 = _interopRequireDefault(_resolveUrl);
3336
3337var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
3338
3339var _playlistJs = require('./playlist.js');
3340
3341var _m3u8Parser = require('m3u8-parser');
3342
3343var _m3u8Parser2 = _interopRequireDefault(_m3u8Parser);
3344
3345var _globalWindow = require('global/window');
3346
3347var _globalWindow2 = _interopRequireDefault(_globalWindow);
3348
3349/**
3350 * Returns a new array of segments that is the result of merging
3351 * properties from an older list of segments onto an updated
3352 * list. No properties on the updated playlist will be overridden.
3353 *
3354 * @param {Array} original the outdated list of segments
3355 * @param {Array} update the updated list of segments
3356 * @param {Number=} offset the index of the first update
3357 * segment in the original segment list. For non-live playlists,
3358 * this should always be zero and does not need to be
3359 * specified. For live playlists, it should be the difference
3360 * between the media sequence numbers in the original and updated
3361 * playlists.
3362 * @return a list of merged segment objects
3363 */
3364var updateSegments = function updateSegments(original, update, offset) {
3365 var result = update.slice();
3366
3367 offset = offset || 0;
3368 var length = Math.min(original.length, update.length + offset);
3369
3370 for (var i = offset; i < length; i++) {
3371 result[i - offset] = (0, _videoJs.mergeOptions)(original[i], result[i - offset]);
3372 }
3373 return result;
3374};
3375
3376exports.updateSegments = updateSegments;
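// Example (illustrative sketch, hypothetical segment objects): merging two
// refreshes of a live playlist whose media sequence advanced by one. The
// segment shared by both lists keeps the timing info already computed for it;
// the brand-new segment passes through untouched.
//
//   var original = [
//     { uri: '0.ts', duration: 10, start: 0 },
//     { uri: '1.ts', duration: 10, start: 10 }
//   ];
//   var update = [
//     { uri: '1.ts', duration: 10 },
//     { uri: '2.ts', duration: 10 }
//   ];
//   var merged = updateSegments(original, update, 1);
//   // merged[0].start === 10 (carried over), merged[1] is the new '2.ts'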
3377/**
3378 * Returns a new master playlist that is the result of merging an
3379 * updated media playlist into the original version. If the
3380 * updated media playlist does not match any of the playlist
3381 * entries in the original master playlist, null is returned.
3382 *
3383 * @param {Object} master a parsed master M3U8 object
3384 * @param {Object} media a parsed media M3U8 object
3385 * @return {Object} a new object that represents the original
3386 * master playlist with the updated media playlist merged in, or
3387 * null if the merge produced no change.
3388 */
3389var updateMaster = function updateMaster(master, media) {
3390 var result = (0, _videoJs.mergeOptions)(master, {});
3391 var playlist = result.playlists.filter(function (p) {
3392 return p.uri === media.uri;
3393 })[0];
3394
3395 if (!playlist) {
3396 return null;
3397 }
3398
3399 // consider the playlist unchanged if the number of segments is equal and the media
3400 // sequence number is unchanged
3401 if (playlist.segments && media.segments && playlist.segments.length === media.segments.length && playlist.mediaSequence === media.mediaSequence) {
3402 return null;
3403 }
3404
3405 var mergedPlaylist = (0, _videoJs.mergeOptions)(playlist, media);
3406
3407 // if the update could overlap existing segment information, merge the two segment lists
3408 if (playlist.segments) {
3409 mergedPlaylist.segments = updateSegments(playlist.segments, media.segments, media.mediaSequence - playlist.mediaSequence);
3410 }
3411
3412 // resolve any segment URIs to prevent us from having to do it later
3413 mergedPlaylist.segments.forEach(function (segment) {
3414 if (!segment.resolvedUri) {
3415 segment.resolvedUri = (0, _resolveUrl2['default'])(mergedPlaylist.resolvedUri, segment.uri);
3416 }
3417 if (segment.key && !segment.key.resolvedUri) {
3418 segment.key.resolvedUri = (0, _resolveUrl2['default'])(mergedPlaylist.resolvedUri, segment.key.uri);
3419 }
3420 if (segment.map && !segment.map.resolvedUri) {
3421 segment.map.resolvedUri = (0, _resolveUrl2['default'])(mergedPlaylist.resolvedUri, segment.map.uri);
3422 }
3423 });
3424
3425 // TODO Right now in the playlists array there are two references to each playlist, one
3426 // that is referenced by index, and one by URI. The index reference may no longer be
3427 // necessary.
3428 for (var i = 0; i < result.playlists.length; i++) {
3429 if (result.playlists[i].uri === media.uri) {
3430 result.playlists[i] = mergedPlaylist;
3431 }
3432 }
3433 result.playlists[media.uri] = mergedPlaylist;
3434
3435 return result;
3436};
3437
3438exports.updateMaster = updateMaster;
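// Usage sketch (hypothetical manifests): updateMaster returns a brand-new
// master object when the media refresh changed anything, and null when the
// segment count and media sequence are both unchanged, so callers can treat
// null as "nothing to do".
//
//   var updated = updateMaster(loader.master, parser.manifest);
//   if (updated) {
//     loader.master = updated;
//   }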
3439var setupMediaPlaylists = function setupMediaPlaylists(master) {
3440 // setup by-URI lookups and resolve media playlist URIs
3441 var i = master.playlists.length;
3442
3443 while (i--) {
3444 var playlist = master.playlists[i];
3445
3446 master.playlists[playlist.uri] = playlist;
3447 playlist.resolvedUri = (0, _resolveUrl2['default'])(master.uri, playlist.uri);
3448
3449 if (!playlist.attributes) {
3450 // Although the spec states an #EXT-X-STREAM-INF tag MUST have a
3451 // BANDWIDTH attribute, we can play the stream without it. This means a poorly
3452 // formatted master playlist may not have an attribute list. An attributes
3453 // property is added here to prevent undefined references when we encounter
3454 // this scenario.
3455 playlist.attributes = {};
3456
3457 _videoJs.log.warn('Invalid playlist STREAM-INF detected. Missing BANDWIDTH attribute.');
3458 }
3459 }
3460};
3461
3462exports.setupMediaPlaylists = setupMediaPlaylists;
3463var resolveMediaGroupUris = function resolveMediaGroupUris(master) {
3464 ['AUDIO', 'SUBTITLES'].forEach(function (mediaType) {
3465 for (var groupKey in master.mediaGroups[mediaType]) {
3466 for (var labelKey in master.mediaGroups[mediaType][groupKey]) {
3467 var mediaProperties = master.mediaGroups[mediaType][groupKey][labelKey];
3468
3469 if (mediaProperties.uri) {
3470 mediaProperties.resolvedUri = (0, _resolveUrl2['default'])(master.uri, mediaProperties.uri);
3471 }
3472 }
3473 }
3474 });
3475};
3476
3477exports.resolveMediaGroupUris = resolveMediaGroupUris;
3478/**
3479 * Calculates the time to wait before refreshing a live playlist
3480 *
3481 * @param {Object} media
3482 * The current media
3483 * @param {Boolean} update
3484 * True if there were any updates from the last refresh, false otherwise
3485 * @return {Number}
3486 * The time in ms to wait before refreshing the live playlist
3487 */
3488var refreshDelay = function refreshDelay(media, update) {
3489 var lastSegment = media.segments[media.segments.length - 1];
3490 var delay = undefined;
3491
3492 if (update && lastSegment && lastSegment.duration) {
3493 delay = lastSegment.duration * 1000;
3494 } else {
3495 // if the playlist is unchanged since the last reload or last segment duration
3496 // cannot be determined, try again after half the target duration
3497 delay = (media.targetDuration || 10) * 500;
3498 }
3499 return delay;
3500};
3501
3502exports.refreshDelay = refreshDelay;
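// Worked examples of the timing rules above (hypothetical media playlists):
//
//   // the refresh produced an update, so wait one last-segment duration
//   refreshDelay({ segments: [{ duration: 6 }], targetDuration: 10 }, true);  // 6000
//   // unchanged since the last reload, so retry after half the target duration
//   refreshDelay({ segments: [{ duration: 6 }], targetDuration: 10 }, false); // 5000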
3503/**
3504 * Load a playlist from a remote location
3505 *
3506 * @class PlaylistLoader
3507 * @extends Stream
3508 * @param {String} srcUrl the url to start with
3509 * @param {Boolean} withCredentials the withCredentials xhr option
3510 * @constructor
3511 */
3512
3513var PlaylistLoader = (function (_EventTarget) {
3514 _inherits(PlaylistLoader, _EventTarget);
3515
3516 function PlaylistLoader(srcUrl, hls, withCredentials) {
3517 var _this = this;
3518
3519 _classCallCheck(this, PlaylistLoader);
3520
3521 _get(Object.getPrototypeOf(PlaylistLoader.prototype), 'constructor', this).call(this);
3522
3523 this.srcUrl = srcUrl;
3524 this.hls_ = hls;
3525 this.withCredentials = withCredentials;
3526
3527 if (!this.srcUrl) {
3528 throw new Error('A non-empty playlist URL is required');
3529 }
3530
3531 // initialize the loader state
3532 this.state = 'HAVE_NOTHING';
3533
3534 // live playlist staleness timeout
3535 this.on('mediaupdatetimeout', function () {
3536 if (_this.state !== 'HAVE_METADATA') {
3537 // only refresh the media playlist if no other activity is going on
3538 return;
3539 }
3540
3541 _this.state = 'HAVE_CURRENT_METADATA';
3542
3543 _this.request = _this.hls_.xhr({
3544 uri: (0, _resolveUrl2['default'])(_this.master.uri, _this.media().uri),
3545 withCredentials: _this.withCredentials
3546 }, function (error, req) {
3547 // disposed
3548 if (!_this.request) {
3549 return;
3550 }
3551
3552 if (error) {
3553 return _this.playlistRequestError(_this.request, _this.media().uri, 'HAVE_METADATA');
3554 }
3555
3556 _this.haveMetadata(_this.request, _this.media().uri);
3557 });
3558 });
3559 }
3560
3561 _createClass(PlaylistLoader, [{
3562 key: 'playlistRequestError',
3563 value: function playlistRequestError(xhr, url, startingState) {
3564 // any in-flight request is now finished
3565 this.request = null;
3566
3567 if (startingState) {
3568 this.state = startingState;
3569 }
3570
3571 this.error = {
3572 playlist: this.master.playlists[url],
3573 status: xhr.status,
3574 message: 'HLS playlist request error at URL: ' + url,
3575 responseText: xhr.responseText,
3576 code: xhr.status >= 500 ? 4 : 2
3577 };
3578
3579 this.trigger('error');
3580 }
3581
3582 // update the playlist loader's state in response to a new or
3583 // updated playlist.
3584 }, {
3585 key: 'haveMetadata',
3586 value: function haveMetadata(xhr, url) {
3587 var _this2 = this;
3588
3589 // any in-flight request is now finished
3590 this.request = null;
3591 this.state = 'HAVE_METADATA';
3592
3593 var parser = new _m3u8Parser2['default'].Parser();
3594
3595 parser.push(xhr.responseText);
3596 parser.end();
3597 parser.manifest.uri = url;
3598 // m3u8-parser does not attach an attributes property to media playlists so make
3599 // sure that the property is attached to avoid undefined reference errors
3600 parser.manifest.attributes = parser.manifest.attributes || {};
3601
3602 // merge this playlist into the master
3603 var update = updateMaster(this.master, parser.manifest);
3604
3605 this.targetDuration = parser.manifest.targetDuration;
3606
3607 if (update) {
3608 this.master = update;
3609 this.media_ = this.master.playlists[parser.manifest.uri];
3610 } else {
3611 this.trigger('playlistunchanged');
3612 }
3613
3614 // refresh live playlists after a target duration passes
3615 if (!this.media().endList) {
3616 _globalWindow2['default'].clearTimeout(this.mediaUpdateTimeout);
3617 this.mediaUpdateTimeout = _globalWindow2['default'].setTimeout(function () {
3618 _this2.trigger('mediaupdatetimeout');
3619 }, refreshDelay(this.media(), !!update));
3620 }
3621
3622 this.trigger('loadedplaylist');
3623 }
3624
3625 /**
3626 * Abort any outstanding work and clean up.
3627 */
3628 }, {
3629 key: 'dispose',
3630 value: function dispose() {
3631 this.stopRequest();
3632 _globalWindow2['default'].clearTimeout(this.mediaUpdateTimeout);
3633 }
3634 }, {
3635 key: 'stopRequest',
3636 value: function stopRequest() {
3637 if (this.request) {
3638 var oldRequest = this.request;
3639
3640 this.request = null;
3641 oldRequest.onreadystatechange = null;
3642 oldRequest.abort();
3643 }
3644 }
3645
3646 /**
3647 * Returns the number of enabled playlists on the master playlist object
3648 *
3649 * @return {Number} number of enabled playlists
3650 */
3651 }, {
3652 key: 'enabledPlaylists_',
3653 value: function enabledPlaylists_() {
3654 return this.master.playlists.filter(_playlistJs.isEnabled).length;
3655 }
3656
3657 /**
3658 * Returns whether the current playlist is the lowest rendition
3659 *
3660 * @return {Boolean} true if on lowest rendition
3661 */
3662 }, {
3663 key: 'isLowestEnabledRendition_',
3664 value: function isLowestEnabledRendition_() {
3665 if (this.master.playlists.length === 1) {
3666 return true;
3667 }
3668
3669 var currentBandwidth = this.media().attributes.BANDWIDTH || Number.MAX_VALUE;
3670
3671 return this.master.playlists.filter(function (playlist) {
3672 if (!(0, _playlistJs.isEnabled)(playlist)) {
3673 return false;
3674 }
3675
3676 return (playlist.attributes.BANDWIDTH || 0) < currentBandwidth;
3677 }).length === 0;
3678 }
3679
3680 /**
3681 * Returns whether the current playlist is the final available rendition
3682 *
3683 * @return {Boolean} true if on final rendition
3684 */
3685 }, {
3686 key: 'isFinalRendition_',
3687 value: function isFinalRendition_() {
3688 return this.master.playlists.filter(_playlistJs.isEnabled).length === 1;
3689 }
3690
3691 /**
3692 * When called without any arguments, returns the currently
3693 * active media playlist. When called with a single argument,
3694 * triggers the playlist loader to asynchronously switch to the
3695 * specified media playlist. Calling this method while the
3696 * loader is in the HAVE_NOTHING state causes an error to be emitted
3697 * but otherwise has no effect.
3698 *
3699 * @param {Object=} playlist the parsed media playlist
3700 * object to switch to
3701 * @return {Playlist} the current loaded media
3702 */
3703 }, {
3704 key: 'media',
3705 value: function media(playlist) {
3706 var _this3 = this;
3707
3708 // getter
3709 if (!playlist) {
3710 return this.media_;
3711 }
3712
3713 // setter
3714 if (this.state === 'HAVE_NOTHING') {
3715 throw new Error('Cannot switch media playlist from ' + this.state);
3716 }
3717
3718 var startingState = this.state;
3719
3720 // find the playlist object if the target playlist has been
3721 // specified by URI
3722 if (typeof playlist === 'string') {
3723 if (!this.master.playlists[playlist]) {
3724 throw new Error('Unknown playlist URI: ' + playlist);
3725 }
3726 playlist = this.master.playlists[playlist];
3727 }
3728
3729 var mediaChange = !this.media_ || playlist.uri !== this.media_.uri;
3730
3731 // switch to fully loaded playlists immediately
3732 if (this.master.playlists[playlist.uri].endList) {
3733 // abort outstanding playlist requests
3734 if (this.request) {
3735 this.request.onreadystatechange = null;
3736 this.request.abort();
3737 this.request = null;
3738 }
3739 this.state = 'HAVE_METADATA';
3740 this.media_ = playlist;
3741
3742 // trigger media change if the active media has been updated
3743 if (mediaChange) {
3744 this.trigger('mediachanging');
3745 this.trigger('mediachange');
3746 }
3747 return;
3748 }
3749
3750 // switching to the active playlist is a no-op
3751 if (!mediaChange) {
3752 return;
3753 }
3754
3755 this.state = 'SWITCHING_MEDIA';
3756
3757 // there is already an outstanding playlist request
3758 if (this.request) {
3759 if ((0, _resolveUrl2['default'])(this.master.uri, playlist.uri) === this.request.url) {
3760 // requesting to switch to the same playlist multiple times
3761 // has no effect after the first
3762 return;
3763 }
3764 this.request.onreadystatechange = null;
3765 this.request.abort();
3766 this.request = null;
3767 }
3768
3769 // request the new playlist
3770 if (this.media_) {
3771 this.trigger('mediachanging');
3772 }
3773
3774 this.request = this.hls_.xhr({
3775 uri: (0, _resolveUrl2['default'])(this.master.uri, playlist.uri),
3776 withCredentials: this.withCredentials
3777 }, function (error, req) {
3778 // disposed
3779 if (!_this3.request) {
3780 return;
3781 }
3782
3783 if (error) {
3784 return _this3.playlistRequestError(_this3.request, playlist.uri, startingState);
3785 }
3786
3787 _this3.haveMetadata(req, playlist.uri);
3788
3789 // fire loadedmetadata the first time a media playlist is loaded
3790 if (startingState === 'HAVE_MASTER') {
3791 _this3.trigger('loadedmetadata');
3792 } else {
3793 _this3.trigger('mediachange');
3794 }
3795 });
3796 }
3797
3798 /**
3799 * pause loading of the playlist
3800 */
3801 }, {
3802 key: 'pause',
3803 value: function pause() {
3804 this.stopRequest();
3805 _globalWindow2['default'].clearTimeout(this.mediaUpdateTimeout);
3806 if (this.state === 'HAVE_NOTHING') {
3807 // If we pause the loader before any data has been retrieved, it's as if we never
3808 // started, so reset to an unstarted state.
3809 this.started = false;
3810 }
3811 // Need to restore state now that no activity is happening
3812 if (this.state === 'SWITCHING_MEDIA') {
3813 // if the loader was in the process of switching media, it should either return to
3814 // HAVE_MASTER or HAVE_METADATA depending on whether the loader has loaded a media
3815 // playlist yet. This is determined by the existence of loader.media_
3816 if (this.media_) {
3817 this.state = 'HAVE_METADATA';
3818 } else {
3819 this.state = 'HAVE_MASTER';
3820 }
3821 } else if (this.state === 'HAVE_CURRENT_METADATA') {
3822 this.state = 'HAVE_METADATA';
3823 }
3824 }
3825
3826 /**
3827 * start loading of the playlist
3828 */
3829 }, {
3830 key: 'load',
3831 value: function load(isFinalRendition) {
3832 var _this4 = this;
3833
3834 _globalWindow2['default'].clearTimeout(this.mediaUpdateTimeout);
3835
3836 var media = this.media();
3837
3838 if (isFinalRendition) {
3839 var delay = media ? media.targetDuration / 2 * 1000 : 5 * 1000;
3840
3841 this.mediaUpdateTimeout = _globalWindow2['default'].setTimeout(function () {
3842 return _this4.load();
3843 }, delay);
3844 return;
3845 }
3846
3847 if (!this.started) {
3848 this.start();
3849 return;
3850 }
3851
3852 if (media && !media.endList) {
3853 this.trigger('mediaupdatetimeout');
3854 } else {
3855 this.trigger('loadedplaylist');
3856 }
3857 }
3858
3859 /**
3860 * start loading of the playlist
3861 */
3862 }, {
3863 key: 'start',
3864 value: function start() {
3865 var _this5 = this;
3866
3867 this.started = true;
3868
3869 // request the specified URL
3870 this.request = this.hls_.xhr({
3871 uri: this.srcUrl,
3872 withCredentials: this.withCredentials
3873 }, function (error, req) {
3874 // disposed
3875 if (!_this5.request) {
3876 return;
3877 }
3878
3879 // clear the loader's request reference
3880 _this5.request = null;
3881
3882 if (error) {
3883 _this5.error = {
3884 status: req.status,
3885 message: 'HLS playlist request error at URL: ' + _this5.srcUrl,
3886 responseText: req.responseText,
3887 // MEDIA_ERR_NETWORK
3888 code: 2
3889 };
3890 if (_this5.state === 'HAVE_NOTHING') {
3891 _this5.started = false;
3892 }
3893 return _this5.trigger('error');
3894 }
3895
3896 var parser = new _m3u8Parser2['default'].Parser();
3897
3898 parser.push(req.responseText);
3899 parser.end();
3900
3901 _this5.state = 'HAVE_MASTER';
3902
3903 parser.manifest.uri = _this5.srcUrl;
3904
3905 // loaded a master playlist
3906 if (parser.manifest.playlists) {
3907 _this5.master = parser.manifest;
3908
3909 setupMediaPlaylists(_this5.master);
3910 resolveMediaGroupUris(_this5.master);
3911
3912 _this5.trigger('loadedplaylist');
3913 if (!_this5.request) {
3914 // no media playlist was specifically selected so start
3915 // from the first listed one
3916 _this5.media(parser.manifest.playlists[0]);
3917 }
3918 return;
3919 }
3920
3921 // loaded a media playlist
3922 // infer a master playlist if none was previously requested
3923 _this5.master = {
3924 mediaGroups: {
3925 'AUDIO': {},
3926 'VIDEO': {},
3927 'CLOSED-CAPTIONS': {},
3928 'SUBTITLES': {}
3929 },
3930 uri: _globalWindow2['default'].location.href,
3931 playlists: [{
3932 uri: _this5.srcUrl
3933 }]
3934 };
3935 _this5.master.playlists[_this5.srcUrl] = _this5.master.playlists[0];
3936 _this5.master.playlists[0].resolvedUri = _this5.srcUrl;
3937 // m3u8-parser does not attach an attributes property to media playlists so make
3938 // sure that the property is attached to avoid undefined reference errors
3939 _this5.master.playlists[0].attributes = _this5.master.playlists[0].attributes || {};
3940 _this5.haveMetadata(req, _this5.srcUrl);
3941 return _this5.trigger('loadedmetadata');
3942 });
3943 }
3944 }]);
3945
3946 return PlaylistLoader;
3947})(_videoJs.EventTarget);
3948
3949exports['default'] = PlaylistLoader;
3950}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
3951},{"./playlist.js":11,"./resolve-url":15,"global/window":32,"m3u8-parser":33}],10:[function(require,module,exports){
3952'use strict';
3953
3954Object.defineProperty(exports, '__esModule', {
3955 value: true
3956});
3957
3958function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
3959
3960var _config = require('./config');
3961
3962var _config2 = _interopRequireDefault(_config);
3963
3964var _playlist = require('./playlist');
3965
3966var _playlist2 = _interopRequireDefault(_playlist);
3967
3968var _utilCodecsJs = require('./util/codecs.js');
3969
3970// Utilities
3971
3972/**
3973 * Returns the CSS value for the specified property on an element
3974 * using `getComputedStyle`. Firefox has a long-standing issue where
3975 * getComputedStyle() may return null when running in an iframe with
3976 * `display: none`.
3977 *
3978 * @see https://bugzilla.mozilla.org/show_bug.cgi?id=548397
3979 * @param {HTMLElement} el the htmlelement to work on
3980 * @param {String} property the property to get the style for
3981 */
3982var safeGetComputedStyle = function safeGetComputedStyle(el, property) {
3983 var result = undefined;
3984
3985 if (!el) {
3986 return '';
3987 }
3988
3989 result = window.getComputedStyle(el);
3990 if (!result) {
3991 return '';
3992 }
3993
3994 return result[property];
3995};
3996
3997/**
3998 * Reusable stable sort function
3999 *
4000 * @param {Array} array the array to sort in place
4001 * @param {Function} sortFn the comparator to sort by
4002 * @function stableSort
4003 */
4004var stableSort = function stableSort(array, sortFn) {
4005 var newArray = array.slice();
4006
4007 array.sort(function (left, right) {
4008 var cmp = sortFn(left, right);
4009
4010 if (cmp === 0) {
4011 return newArray.indexOf(left) - newArray.indexOf(right);
4012 }
4013 return cmp;
4014 });
4015};
4016
4017/**
4018 * A comparator function to sort two playlist object by bandwidth.
4019 *
4020 * @param {Object} left a media playlist object
4021 * @param {Object} right a media playlist object
4022 * @return {Number} Greater than zero if the bandwidth attribute of
4023 * left is greater than the corresponding attribute of right. Less
4024 * than zero if the bandwidth of right is greater than left and
4025 * exactly zero if the two are equal.
4026 */
4027var comparePlaylistBandwidth = function comparePlaylistBandwidth(left, right) {
4028 var leftBandwidth = undefined;
4029 var rightBandwidth = undefined;
4030
4031 if (left.attributes.BANDWIDTH) {
4032 leftBandwidth = left.attributes.BANDWIDTH;
4033 }
4034 leftBandwidth = leftBandwidth || window.Number.MAX_VALUE;
4035 if (right.attributes.BANDWIDTH) {
4036 rightBandwidth = right.attributes.BANDWIDTH;
4037 }
4038 rightBandwidth = rightBandwidth || window.Number.MAX_VALUE;
4039
4040 return leftBandwidth - rightBandwidth;
4041};
4042
4043exports.comparePlaylistBandwidth = comparePlaylistBandwidth;
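// Example inputs for the comparator above (hypothetical playlist objects). A
// playlist without a BANDWIDTH attribute is treated as Number.MAX_VALUE, so it
// sorts after every playlist that declares one.
//
//   comparePlaylistBandwidth(
//     { attributes: { BANDWIDTH: 240000 } },
//     { attributes: { BANDWIDTH: 640000 } }
//   ); // negative, so the 240 kbps rendition sorts first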
4044/**
4045 * A comparator function to sort two playlist object by resolution (width).
4046 * @param {Object} left a media playlist object
4047 * @param {Object} right a media playlist object
4048 * @return {Number} Greater than zero if the resolution.width attribute of
4049 * left is greater than the corresponding attribute of right. Less
4050 * than zero if the resolution.width of right is greater than left and
4051 * exactly zero if the two are equal.
4052 */
4053var comparePlaylistResolution = function comparePlaylistResolution(left, right) {
4054 var leftWidth = undefined;
4055 var rightWidth = undefined;
4056
4057 if (left.attributes.RESOLUTION && left.attributes.RESOLUTION.width) {
4058 leftWidth = left.attributes.RESOLUTION.width;
4059 }
4060
4061 leftWidth = leftWidth || window.Number.MAX_VALUE;
4062
4063 if (right.attributes.RESOLUTION && right.attributes.RESOLUTION.width) {
4064 rightWidth = right.attributes.RESOLUTION.width;
4065 }
4066
4067 rightWidth = rightWidth || window.Number.MAX_VALUE;
4068
4069 // NOTE - Fallback to bandwidth sort as appropriate in cases where multiple renditions
4070 // have the same media dimensions/resolution
4071 if (leftWidth === rightWidth && left.attributes.BANDWIDTH && right.attributes.BANDWIDTH) {
4072 return left.attributes.BANDWIDTH - right.attributes.BANDWIDTH;
4073 }
4074 return leftWidth - rightWidth;
4075};
4076
4077exports.comparePlaylistResolution = comparePlaylistResolution;
4078/**
4079 * Chooses the appropriate media playlist based on bandwidth and player size
4080 *
4081 * @param {Object} master
4082 * Object representation of the master manifest
4083 * @param {Number} playerBandwidth
4084 * Current calculated bandwidth of the player
4085 * @param {Number} playerWidth
4086 * Current width of the player element
4087 * @param {Number} playerHeight
4088 * Current height of the player element
4089 * @return {Playlist} the highest bitrate playlist less than the
4090 * currently detected bandwidth, accounting for some amount of
4091 * bandwidth variance
4092 */
4093var simpleSelector = function simpleSelector(master, playerBandwidth, playerWidth, playerHeight) {
4094 // convert the playlists to an intermediary representation to make comparisons easier
4095 var sortedPlaylistReps = master.playlists.map(function (playlist) {
4096 var width = undefined;
4097 var height = undefined;
4098 var bandwidth = undefined;
4099
4100 width = playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.width;
4101 height = playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height;
4102 bandwidth = playlist.attributes.BANDWIDTH;
4103
4104 bandwidth = bandwidth || window.Number.MAX_VALUE;
4105
4106 return {
4107 bandwidth: bandwidth,
4108 width: width,
4109 height: height,
4110 playlist: playlist
4111 };
4112 });
4113
4114 stableSort(sortedPlaylistReps, function (left, right) {
4115 return left.bandwidth - right.bandwidth;
4116 });
4117
4118 // filter out any playlists that have been excluded due to
4119 // incompatible configurations
4120 sortedPlaylistReps = sortedPlaylistReps.filter(function (rep) {
4121 return !_playlist2['default'].isIncompatible(rep.playlist);
4122 });
4123
4124 // filter out any playlists that have been disabled manually through the representations
4125 // api or blacklisted temporarily due to playback errors.
4126 var enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
4127 return _playlist2['default'].isEnabled(rep.playlist);
4128 });
4129
4130 if (!enabledPlaylistReps.length) {
4131 // if there are no enabled playlists, then they have all been blacklisted or disabled
4132 // by the user through the representations api. In this case, ignore blacklisting and
4133 // fallback to what the user wants by using playlists the user has not disabled.
4134 enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
4135 return !_playlist2['default'].isDisabled(rep.playlist);
4136 });
4137 }
4138
4139 // filter out any variant that has greater effective bitrate
4140 // than the current estimated bandwidth
4141 var bandwidthPlaylistReps = enabledPlaylistReps.filter(function (rep) {
4142 return rep.bandwidth * _config2['default'].BANDWIDTH_VARIANCE < playerBandwidth;
4143 });
4144
4145 var highestRemainingBandwidthRep = bandwidthPlaylistReps[bandwidthPlaylistReps.length - 1];
4146
4147 // get all of the renditions with the same (highest) bandwidth
4148 // and then take the very first element
4149 var bandwidthBestRep = bandwidthPlaylistReps.filter(function (rep) {
4150 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
4151 })[0];
4152
4153 // filter out playlists without resolution information
4154 var haveResolution = bandwidthPlaylistReps.filter(function (rep) {
4155 return rep.width && rep.height;
4156 });
4157
4158 // sort variants by resolution
4159 stableSort(haveResolution, function (left, right) {
4160 return left.width - right.width;
4161 });
4162
4163 // if we have the exact resolution as the player use it
4164 var resolutionBestRepList = haveResolution.filter(function (rep) {
4165 return rep.width === playerWidth && rep.height === playerHeight;
4166 });
4167
4168 highestRemainingBandwidthRep = resolutionBestRepList[resolutionBestRepList.length - 1];
4169 // ensure that we pick the highest bandwidth variant that has the exact resolution
4170 var resolutionBestRep = resolutionBestRepList.filter(function (rep) {
4171 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
4172 })[0];
4173
4174 var resolutionPlusOneList = undefined;
4175 var resolutionPlusOneSmallest = undefined;
4176 var resolutionPlusOneRep = undefined;
4177
4178 // find the smallest variant that is larger than the player
4179 // if there is no match of exact resolution
4180 if (!resolutionBestRep) {
4181 resolutionPlusOneList = haveResolution.filter(function (rep) {
4182 return rep.width > playerWidth || rep.height > playerHeight;
4183 });
4184
4185 // find all the variants that have the same smallest resolution
4186 resolutionPlusOneSmallest = resolutionPlusOneList.filter(function (rep) {
4187 return rep.width === resolutionPlusOneList[0].width && rep.height === resolutionPlusOneList[0].height;
4188 });
4189
4190 // ensure that we also pick the highest bandwidth variant that
4191 // is just-larger-than the video player
4192 highestRemainingBandwidthRep = resolutionPlusOneSmallest[resolutionPlusOneSmallest.length - 1];
4193 resolutionPlusOneRep = resolutionPlusOneSmallest.filter(function (rep) {
4194 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
4195 })[0];
4196 }
4197
4198 // fallback chain of variants
4199 var chosenRep = resolutionPlusOneRep || resolutionBestRep || bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];
4200
4201 return chosenRep ? chosenRep.playlist : null;
4202};
4203
4204exports.simpleSelector = simpleSelector;
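// Usage sketch (hypothetical values): given a parsed master playlist, a
// bandwidth estimate in bits per second, and the player's dimensions in
// pixels, simpleSelector returns the chosen media playlist object, or null if
// the master has no playlists at all.
//
//   var chosen = simpleSelector(master, 4194304, 640, 360);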
4205// Playlist Selectors
4206
4207/**
4208 * Chooses the appropriate media playlist based on the most recent
4209 * bandwidth estimate and the player size.
4210 *
4211 * Expects to be called within the context of an instance of HlsHandler
4212 *
4213 * @return {Playlist} the highest bitrate playlist less than the
4214 * currently detected bandwidth, accounting for some amount of
4215 * bandwidth variance
4216 */
4217var lastBandwidthSelector = function lastBandwidthSelector() {
4218 return simpleSelector(this.playlists.master, this.systemBandwidth, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10), parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10));
4219};
4220
4221exports.lastBandwidthSelector = lastBandwidthSelector;
4222/**
4223 * Chooses the appropriate media playlist based on an
4224 * exponential-weighted moving average of the bandwidth after
4225 * filtering for player size.
4226 *
4227 * Expects to be called within the context of an instance of HlsHandler
4228 *
4229 * @param {Number} decay - a number between 0 and 1. Higher values of
4230 * this parameter will cause previous bandwidth estimates to lose
4231 * significance more quickly.
4232 * @return {Function} a function which can be invoked to create a new
4233 * playlist selector function.
4234 * @see https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
4235 */
4236var movingAverageBandwidthSelector = function movingAverageBandwidthSelector(decay) {
4237 var average = -1;
4238
4239 if (decay < 0 || decay > 1) {
4240 throw new Error('Moving average bandwidth decay must be between 0 and 1.');
4241 }
4242
4243 return function () {
4244 if (average < 0) {
4245 average = this.systemBandwidth;
4246 }
4247
4248 average = decay * this.systemBandwidth + (1 - decay) * average;
4249 return simpleSelector(this.playlists.master, average, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10), parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10));
4250 };
4251};
4252
4253exports.movingAverageBandwidthSelector = movingAverageBandwidthSelector;
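// Worked example of the decay arithmetic above: with decay = 0.5, a previous
// average of 4,000,000 bps, and a new systemBandwidth sample of 2,000,000 bps,
//
//   average = 0.5 * 2000000 + (1 - 0.5) * 4000000; // 3,000,000 bps
//
// The factory returns a selector that reads this.systemBandwidth when invoked,
// so it is meant to be installed on an HlsHandler instance, e.g. (sketch):
//
//   hlsHandler.selectPlaylist = movingAverageBandwidthSelector(0.5);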
4254/**
4255 * Chooses the appropriate media playlist based on the potential to rebuffer
4256 *
4257 * @param {Object} settings
4258 * Object of information required to use this selector
4259 * @param {Object} settings.master
4260 * Object representation of the master manifest
4261 * @param {Number} settings.currentTime
4262 * The current time of the player
4263 * @param {Number} settings.bandwidth
4264 * Current measured bandwidth
4265 * @param {Number} settings.duration
4266 * Duration of the media
4267 * @param {Number} settings.segmentDuration
4268 * Segment duration to be used in round trip time calculations
4269 * @param {Number} settings.timeUntilRebuffer
4270 * Time left in seconds until the player has to rebuffer
4271 * @param {Number} settings.currentTimeline
4272 * The current timeline segments are being loaded from
4273 * @param {SyncController} settings.syncController
4274 * SyncController for determining if we have a sync point for a given playlist
4275 * @return {Object|null}
4276 * {Object} return.playlist
4277 * The highest bandwidth playlist with the least amount of rebuffering
4278 * {Number} return.rebufferingImpact
4279 * The amount of time in seconds switching to this playlist will rebuffer. A
4280 * negative value means that switching will cause zero rebuffering.
4281 */
4282var minRebufferMaxBandwidthSelector = function minRebufferMaxBandwidthSelector(settings) {
4283 var master = settings.master;
4284 var currentTime = settings.currentTime;
4285 var bandwidth = settings.bandwidth;
4286 var duration = settings.duration;
4287 var segmentDuration = settings.segmentDuration;
4288 var timeUntilRebuffer = settings.timeUntilRebuffer;
4289 var currentTimeline = settings.currentTimeline;
4290 var syncController = settings.syncController;
4291
4292 // filter out any playlists that have been excluded due to
4293 // incompatible configurations
4294 var compatiblePlaylists = master.playlists.filter(function (playlist) {
4295 return !_playlist2['default'].isIncompatible(playlist);
4296 });
4297
4298 // filter out any playlists that have been disabled manually through the representations
4299 // api or blacklisted temporarily due to playback errors.
4300 var enabledPlaylists = compatiblePlaylists.filter(_playlist2['default'].isEnabled);
4301
4302 if (!enabledPlaylists.length) {
4303 // if there are no enabled playlists, then they have all been blacklisted or disabled
4304 // by the user through the representations api. In this case, ignore blacklisting and
4305 // fallback to what the user wants by using playlists the user has not disabled.
4306 enabledPlaylists = compatiblePlaylists.filter(function (playlist) {
4307 return !_playlist2['default'].isDisabled(playlist);
4308 });
4309 }
4310
4311 var bandwidthPlaylists = enabledPlaylists.filter(_playlist2['default'].hasAttribute.bind(null, 'BANDWIDTH'));
4312
4313 var rebufferingEstimates = bandwidthPlaylists.map(function (playlist) {
4314 var syncPoint = syncController.getSyncPoint(playlist, duration, currentTimeline, currentTime);
4315 // If there is no sync point for this playlist, switching to it will require a
4316 // sync request first. This will double the request time
4317 var numRequests = syncPoint ? 1 : 2;
4318 var requestTimeEstimate = _playlist2['default'].estimateSegmentRequestTime(segmentDuration, bandwidth, playlist);
4319 var rebufferingImpact = requestTimeEstimate * numRequests - timeUntilRebuffer;
4320
4321 return {
4322 playlist: playlist,
4323 rebufferingImpact: rebufferingImpact
4324 };
4325 });
4326
4327 var noRebufferingPlaylists = rebufferingEstimates.filter(function (estimate) {
4328 return estimate.rebufferingImpact <= 0;
4329 });
4330
4331 // Sort by bandwidth DESC
4332 stableSort(noRebufferingPlaylists, function (a, b) {
4333 return comparePlaylistBandwidth(b.playlist, a.playlist);
4334 });
4335
4336 if (noRebufferingPlaylists.length) {
4337 return noRebufferingPlaylists[0];
4338 }
4339
4340 stableSort(rebufferingEstimates, function (a, b) {
4341 return a.rebufferingImpact - b.rebufferingImpact;
4342 });
4343
4344 return rebufferingEstimates[0] || null;
4345};
4346
4347exports.minRebufferMaxBandwidthSelector = minRebufferMaxBandwidthSelector;
4348/**
4349 * Chooses the appropriate media playlist, which in this case is the lowest bitrate
4350 * one with video. If no renditions with video exist, return the lowest audio rendition.
4351 *
4352 * Expects to be called within the context of an instance of HlsHandler
4353 *
4354 * @return {Object|null}
4355 * {Object} return.playlist
4356 * The lowest bitrate playlist that contains a video codec. If no such rendition
4357 * exists pick the lowest audio rendition.
4358 */
4359var lowestBitrateCompatibleVariantSelector = function lowestBitrateCompatibleVariantSelector() {
4360 // filter out any playlists that have been excluded due to
4361 // incompatible configurations or playback errors
4362 var playlists = this.playlists.master.playlists.filter(_playlist2['default'].isEnabled);
4363
4364 // Sort ascending by bitrate
4365 stableSort(playlists, function (a, b) {
4366 return comparePlaylistBandwidth(a, b);
4367 });
4368
4369 // Parse and assume that playlists with no video codec have no video
4370 // (this is not necessarily true, although it is generally true).
4371 //
4372 // If an entire manifest has no valid videos everything will get filtered
4373 // out.
4374 var playlistsWithVideo = playlists.filter(function (playlist) {
4375 return (0, _utilCodecsJs.parseCodecs)(playlist.attributes.CODECS).videoCodec;
4376 });
4377
4378 return playlistsWithVideo[0] || null;
4379};
4380exports.lowestBitrateCompatibleVariantSelector = lowestBitrateCompatibleVariantSelector;
4381},{"./config":3,"./playlist":11,"./util/codecs.js":19}],11:[function(require,module,exports){
4382(function (global){
4383/**
4384 * @file playlist.js
4385 *
4386 * Playlist related utilities.
4387 */
4388'use strict';
4389
4390Object.defineProperty(exports, '__esModule', {
4391 value: true
4392});
4393
4394function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
4395
4396var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
4397
4398var _globalWindow = require('global/window');
4399
4400var _globalWindow2 = _interopRequireDefault(_globalWindow);
4401
4402/**
4403 * walk backward until we find a duration we can use
4404 * or return a failure
4405 *
4406 * @param {Playlist} playlist the playlist to walk through
4407 * @param {Number} endSequence the mediaSequence to stop walking on
4408 */
4409
4410var backwardDuration = function backwardDuration(playlist, endSequence) {
4411 var result = 0;
4412 var i = endSequence - playlist.mediaSequence;
4413 // if a start time is available for the segment immediately following
4414 // the interval, use it
4415 var segment = playlist.segments[i];
4416
4417 // Walk backward until we find the latest segment with timeline
4418 // information that is earlier than endSequence
4419 if (segment) {
4420 if (typeof segment.start !== 'undefined') {
4421 return { result: segment.start, precise: true };
4422 }
4423 if (typeof segment.end !== 'undefined') {
4424 return {
4425 result: segment.end - segment.duration,
4426 precise: true
4427 };
4428 }
4429 }
4430 while (i--) {
4431 segment = playlist.segments[i];
4432 if (typeof segment.end !== 'undefined') {
4433 return { result: result + segment.end, precise: true };
4434 }
4435
4436 result += segment.duration;
4437
4438 if (typeof segment.start !== 'undefined') {
4439 return { result: result + segment.start, precise: true };
4440 }
4441 }
4442 return { result: result, precise: false };
4443};
4444
4445/**
4446 * walk forward until we find a duration we can use
4447 * or return a failure
4448 *
4449 * @param {Playlist} playlist the playlist to walk through
4450 * @param {Number} endSequence the mediaSequence to stop walking on
4451 */
4452var forwardDuration = function forwardDuration(playlist, endSequence) {
4453 var result = 0;
4454 var segment = undefined;
4455 var i = endSequence - playlist.mediaSequence;
4456 // Walk forward until we find the earliest segment with timeline
4457 // information
4458
4459 for (; i < playlist.segments.length; i++) {
4460 segment = playlist.segments[i];
4461 if (typeof segment.start !== 'undefined') {
4462 return {
4463 result: segment.start - result,
4464 precise: true
4465 };
4466 }
4467
4468 result += segment.duration;
4469
4470 if (typeof segment.end !== 'undefined') {
4471 return {
4472 result: segment.end - result,
4473 precise: true
4474 };
4475 }
4476 }
4477 // indicate we didn't find a useful duration estimate
4478 return { result: -1, precise: false };
4479};
4480
4481/**
4482 * Calculate the media duration from the segments associated with a
4483 * playlist. The duration of a subinterval of the available segments
4484 * may be calculated by specifying an end index.
4485 *
4486 * @param {Object} playlist a media playlist object
4487 * @param {Number=} endSequence an exclusive upper boundary
4488 * for the playlist. Defaults to playlist length.
4489 * @param {Number} expired the amount of time that has dropped
4490 * off the front of the playlist in a live scenario
4491 * @return {Number} the duration between the first available segment
4492 * and end index.
4493 */
4494var intervalDuration = function intervalDuration(playlist, endSequence, expired) {
4495 var backward = undefined;
4496 var forward = undefined;
4497
4498 if (typeof endSequence === 'undefined') {
4499 endSequence = playlist.mediaSequence + playlist.segments.length;
4500 }
4501
4502 if (endSequence < playlist.mediaSequence) {
4503 return 0;
4504 }
4505
4506 // do a backward walk to estimate the duration
4507 backward = backwardDuration(playlist, endSequence);
4508 if (backward.precise) {
4509 // if we were able to base our duration estimate on timing
4510 // information provided directly from the Media Source, return
4511 // it
4512 return backward.result;
4513 }
4514
4515 // walk forward to see if a precise duration estimate can be made
4516 // that way
4517 forward = forwardDuration(playlist, endSequence);
4518 if (forward.precise) {
4519 // we found a segment that has been buffered and so its
4520 // position is known precisely
4521 return forward.result;
4522 }
4523
4524 // return the less-precise, playlist-based duration estimate
4525 return backward.result + expired;
4526};
4527
4528/**
4529 * Calculates the duration of a playlist. If a start and end index
4530 * are specified, the duration will be for the subset of the media
4531 * timeline between those two indices. The total duration for live
4532 * playlists is always Infinity.
4533 *
4534 * @param {Object} playlist a media playlist object
4535 * @param {Number=} endSequence an exclusive upper
4536 * boundary for the playlist. Defaults to the playlist media
4537 * sequence number plus its length.
4538 * @param {Number=} expired the amount of time that has
4539 * dropped off the front of the playlist in a live scenario
4540 * @return {Number} the duration between the start index and end
4541 * index.
4542 */
4543var duration = function duration(playlist, endSequence, expired) {
4544 if (!playlist) {
4545 return 0;
4546 }
4547
4548 if (typeof expired !== 'number') {
4549 expired = 0;
4550 }
4551
4552 // if a slice of the total duration is not requested, use
4553 // playlist-level duration indicators when they're present
4554 if (typeof endSequence === 'undefined') {
4555 // if present, use the duration specified in the playlist
4556 if (playlist.totalDuration) {
4557 return playlist.totalDuration;
4558 }
4559
4560 // duration should be Infinity for live playlists
4561 if (!playlist.endList) {
4562 return _globalWindow2['default'].Infinity;
4563 }
4564 }
4565
4566 // calculate the total duration based on the segment durations
4567 return intervalDuration(playlist, endSequence, expired);
4568};
4569
4570exports.duration = duration;
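// Examples of the duration rules above (hypothetical playlists): a VOD
// playlist sums its segment durations, while a live playlist (no endList)
// always reports Infinity.
//
//   duration({ endList: true, mediaSequence: 0,
//              segments: [{ duration: 10 }, { duration: 10 }] }); // 20
//   duration({ endList: false, mediaSequence: 0, segments: [] }); // Infinity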
4571/**
4572 * Calculate the time between two indices in the current playlist.
4573 * Neither the start index nor the end index needs to be within the
4574 * current playlist, in which case the targetDuration of the playlist
4575 * is used to approximate the durations of the segments.
4576 *
4577 * @param {Object} playlist a media playlist object
4578 * @param {Number} startIndex
4579 * @param {Number} endIndex
4580 * @return {Number} the number of seconds between startIndex and endIndex
4581 */
4582var sumDurations = function sumDurations(playlist, startIndex, endIndex) {
4583 var durations = 0;
4584
4585 if (startIndex > endIndex) {
4586 var _ref = [endIndex, startIndex];
4587 startIndex = _ref[0];
4588 endIndex = _ref[1];
4589 }
4590
4591 if (startIndex < 0) {
4592 for (var i = startIndex; i < Math.min(0, endIndex); i++) {
4593 durations += playlist.targetDuration;
4594 }
4595 startIndex = 0;
4596 }
4597
4598 for (var i = startIndex; i < endIndex; i++) {
4599 durations += playlist.segments[i].duration;
4600 }
4601
4602 return durations;
4603};
4604
4605exports.sumDurations = sumDurations;
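// Worked example (hypothetical playlist): a negative startIndex approximates
// the segments that fell off the playlist using targetDuration, per the first
// loop above.
//
//   var playlist = { targetDuration: 10,
//                    segments: [{ duration: 9 }, { duration: 11 }] };
//   sumDurations(playlist, -1, 2); // 10 + 9 + 11 === 30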
4606/**
4607 * Determines the media index of the segment corresponding to the safe edge of the live
4608 * window which is the duration of the last segment plus 2 target durations from the end
4609 * of the playlist.
4610 *
4611 * @param {Object} playlist
4612 * a media playlist object
4613 * @return {Number}
4614 * The media index of the segment at the safe live point. 0 if there is no "safe"
4615 * point.
4616 * @function safeLiveIndex
4617 */
4618var safeLiveIndex = function safeLiveIndex(playlist) {
4619 if (!playlist.segments.length) {
4620 return 0;
4621 }
4622
4623 var i = playlist.segments.length - 1;
4624 var distanceFromEnd = playlist.segments[i].duration || playlist.targetDuration;
4625 var safeDistance = distanceFromEnd + playlist.targetDuration * 2;
4626
4627 while (i--) {
4628 distanceFromEnd += playlist.segments[i].duration;
4629
4630 if (distanceFromEnd >= safeDistance) {
4631 break;
4632 }
4633 }
4634
4635 return Math.max(0, i);
4636};
4637
4638exports.safeLiveIndex = safeLiveIndex;
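// Worked example: with five 10 second segments and a targetDuration of 10, the
// safe distance is 10 + 2 * 10 = 30 seconds from the live edge, which lands on
// the third-from-last segment (index 2).
//
//   safeLiveIndex({
//     targetDuration: 10,
//     segments: [{ duration: 10 }, { duration: 10 }, { duration: 10 },
//                { duration: 10 }, { duration: 10 }]
//   }); // 2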
4639/**
4640 * Calculates the playlist end time
4641 *
4642 * @param {Object} playlist a media playlist object
4643 * @param {Number=} expired the amount of time that has
4644 * dropped off the front of the playlist in a live scenario
4645 * @param {Boolean=} useSafeLiveEnd a boolean value indicating whether or not the
4646 * playlist end calculation should consider the safe live end
4647 * (truncate the playlist end by three segments). This is normally
4648 * used for calculating the end of the playlist's seekable range.
4649 * @return {Number} the end time of the playlist
4650 * @function playlistEnd
4651 */
4652var playlistEnd = function playlistEnd(playlist, expired, useSafeLiveEnd) {
4653 if (!playlist || !playlist.segments) {
4654 return null;
4655 }
4656 if (playlist.endList) {
4657 return duration(playlist);
4658 }
4659
4660 if (expired === null) {
4661 return null;
4662 }
4663
4664 expired = expired || 0;
4665
4666 var endSequence = useSafeLiveEnd ? safeLiveIndex(playlist) : playlist.segments.length;
4667
4668 return intervalDuration(playlist, playlist.mediaSequence + endSequence, expired);
4669};
4670
4671exports.playlistEnd = playlistEnd;
4672/**
4673 * Calculates the interval of time that is currently seekable in a
4674 * playlist. The returned time ranges are relative to the earliest
4675 * moment in the specified playlist that is still available. A full
4676 * seekable implementation for live streams would need to offset
4677 * these values by the duration of content that has expired from the
4678 * stream.
4679 *
4680 * @param {Object} playlist a media playlist object
4682 * @param {Number=} expired the amount of time that has
4683 * dropped off the front of the playlist in a live scenario
4684 * @return {TimeRanges} the periods of time that are valid targets
4685 * for seeking
4686 */
4687var seekable = function seekable(playlist, expired) {
4688 var useSafeLiveEnd = true;
4689 var seekableStart = expired || 0;
4690 var seekableEnd = playlistEnd(playlist, expired, useSafeLiveEnd);
4691
4692 if (seekableEnd === null) {
4693 return (0, _videoJs.createTimeRange)();
4694 }
4695 return (0, _videoJs.createTimeRange)(seekableStart, seekableEnd);
4696};
4697
4698exports.seekable = seekable;
4699var isWholeNumber = function isWholeNumber(num) {
4700 return num - Math.floor(num) === 0;
4701};
4702
4703var roundSignificantDigit = function roundSignificantDigit(increment, num) {
4704 // If we have a whole number, just add 1 to it
4705 if (isWholeNumber(num)) {
4706 return num + increment * 0.1;
4707 }
4708
4709 var numDecimalDigits = num.toString().split('.')[1].length;
4710
4711 for (var i = 1; i <= numDecimalDigits; i++) {
4712 var scale = Math.pow(10, i);
4713 var temp = num * scale;
4714
4715 if (isWholeNumber(temp) || i === numDecimalDigits) {
4716 return (temp + increment) / scale;
4717 }
4718 }
4719};
4720
4721var ceilLeastSignificantDigit = roundSignificantDigit.bind(null, 1);
4722var floorLeastSignificantDigit = roundSignificantDigit.bind(null, -1);
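// Examples of the least-significant-digit helpers above, which nudge a
// duration just past any rounding jitter in the reported segment timing:
//
//   ceilLeastSignificantDigit(10.5);  // 10.6
//   floorLeastSignificantDigit(10.5); // 10.4
//   ceilLeastSignificantDigit(10);    // 10.1 (whole numbers move by +/- 0.1)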
4723
4724/**
4725 * Determine the index and estimated starting time of the segment that
4726 * contains a specified playback position in a media playlist.
4727 *
4728 * @param {Object} playlist the media playlist to query
4729 * @param {Number} currentTime The number of seconds since the earliest
4730 * possible position to determine the containing segment for
4731 * @param {Number} startIndex
4732 * @param {Number} startTime
4733 * @return {Object}
4734 */
4735var getMediaInfoForTime = function getMediaInfoForTime(playlist, currentTime, startIndex, startTime) {
4736 var i = undefined;
4737 var segment = undefined;
4738 var numSegments = playlist.segments.length;
4739
4740 var time = currentTime - startTime;
4741
4742 if (time < 0) {
4743 // Walk backward from startIndex in the playlist, adding durations
4744 // until we find a segment that contains `time` and return it
4745 if (startIndex > 0) {
4746 for (i = startIndex - 1; i >= 0; i--) {
4747 segment = playlist.segments[i];
4748 time += floorLeastSignificantDigit(segment.duration);
4749 if (time > 0) {
4750 return {
4751 mediaIndex: i,
4752 startTime: startTime - sumDurations(playlist, startIndex, i)
4753 };
4754 }
4755 }
4756 }
4757 // We were unable to find a good segment within the playlist
4758 // so select the first segment
4759 return {
4760 mediaIndex: 0,
4761 startTime: currentTime
4762 };
4763 }
4764
4765 // When startIndex is negative, we first walk forward to the first segment,
4766 // adding target durations. If we "run out of time" before getting to
4767 // the first segment, return the first segment
4768 if (startIndex < 0) {
4769 for (i = startIndex; i < 0; i++) {
4770 time -= playlist.targetDuration;
4771 if (time < 0) {
4772 return {
4773 mediaIndex: 0,
4774 startTime: currentTime
4775 };
4776 }
4777 }
4778 startIndex = 0;
4779 }
4780
4781 // Walk forward from startIndex in the playlist, subtracting durations
4782 // until we find a segment that contains `time` and return it
4783 for (i = startIndex; i < numSegments; i++) {
4784 segment = playlist.segments[i];
4785 time -= ceilLeastSignificantDigit(segment.duration);
4786 if (time < 0) {
4787 return {
4788 mediaIndex: i,
4789 startTime: startTime + sumDurations(playlist, startIndex, i)
4790 };
4791 }
4792 }
4793
4794 // We are out of possible candidates so load the last one...
4795 return {
4796 mediaIndex: numSegments - 1,
4797 startTime: currentTime
4798 };
4799};
4800
4801exports.getMediaInfoForTime = getMediaInfoForTime;
4802/**
4803 * Check whether the playlist is blacklisted or not.
4804 *
4805 * @param {Object} playlist the media playlist object
4806 * @return {boolean} whether the playlist is blacklisted or not
4807 * @function isBlacklisted
4808 */
4809var isBlacklisted = function isBlacklisted(playlist) {
4810 return playlist.excludeUntil && playlist.excludeUntil > Date.now();
4811};
4812
4813exports.isBlacklisted = isBlacklisted;
4814/**
4815 * Check whether the playlist is compatible with current playback configuration or has
4816 * been blacklisted permanently for being incompatible.
4817 *
4818 * @param {Object} playlist the media playlist object
4819 * @return {boolean} whether the playlist is incompatible or not
4820 * @function isIncompatible
4821 */
4822var isIncompatible = function isIncompatible(playlist) {
4823 return playlist.excludeUntil && playlist.excludeUntil === Infinity;
4824};
4825
4826exports.isIncompatible = isIncompatible;
4827/**
4828 * Check whether the playlist is enabled or not.
4829 *
4830 * @param {Object} playlist the media playlist object
4831 * @return {boolean} whether the playlist is enabled or not
4832 * @function isEnabled
4833 */
4834var isEnabled = function isEnabled(playlist) {
4835 var blacklisted = isBlacklisted(playlist);
4836
4837 return !playlist.disabled && !blacklisted;
4838};
4839
4840exports.isEnabled = isEnabled;
4841/**
4842 * Check whether the playlist has been manually disabled through the representations api.
4843 *
4844 * @param {Object} playlist the media playlist object
4845 * @return {boolean} whether the playlist is disabled manually or not
4846 * @function isDisabled
4847 */
4848var isDisabled = function isDisabled(playlist) {
4849 return playlist.disabled;
4850};
4851
4852exports.isDisabled = isDisabled;
4853/**
4854 * Returns whether the current playlist is an AES encrypted HLS stream
4855 *
4856 * @return {Boolean} true if it's an AES encrypted HLS stream
4857 */
4858var isAes = function isAes(media) {
4859 for (var i = 0; i < media.segments.length; i++) {
4860 if (media.segments[i].key) {
4861 return true;
4862 }
4863 }
4864 return false;
4865};
4866
4867exports.isAes = isAes;
4868/**
4869 * Returns whether the current playlist contains fMP4
4870 *
4871 * @return {Boolean} true if the playlist contains fMP4
4872 */
4873var isFmp4 = function isFmp4(media) {
4874 for (var i = 0; i < media.segments.length; i++) {
4875 if (media.segments[i].map) {
4876 return true;
4877 }
4878 }
4879 return false;
4880};
4881
4882exports.isFmp4 = isFmp4;
4883/**
4884 * Checks if the playlist has a value for the specified attribute
4885 *
4886 * @param {String} attr
4887 * Attribute to check for
4888 * @param {Object} playlist
4889 * The media playlist object
4890 * @return {Boolean}
4891 * Whether the playlist contains a value for the attribute or not
4892 * @function hasAttribute
4893 */
4894var hasAttribute = function hasAttribute(attr, playlist) {
4895 return playlist.attributes && playlist.attributes[attr];
4896};
4897
4898exports.hasAttribute = hasAttribute;
4899/**
4900 * Estimates the time required to complete a segment download from the specified playlist
4901 *
4902 * @param {Number} segmentDuration
4903 * Duration of requested segment
4904 * @param {Number} bandwidth
4905 * Current measured bandwidth of the player
4906 * @param {Object} playlist
4907 * The media playlist object
4908 * @param {Number=} bytesReceived
4909 * Number of bytes already received for the request. Defaults to 0
4910 * @return {Number|NaN}
4911 * The estimated time to request the segment. NaN if bandwidth information for
4912 * the given playlist is unavailable
4913 * @function estimateSegmentRequestTime
4914 */
4915var estimateSegmentRequestTime = function estimateSegmentRequestTime(segmentDuration, bandwidth, playlist) {
4916 var bytesReceived = arguments.length <= 3 || arguments[3] === undefined ? 0 : arguments[3];
4917
4918 if (!hasAttribute('BANDWIDTH', playlist)) {
4919 return NaN;
4920 }
4921
4922 var size = segmentDuration * playlist.attributes.BANDWIDTH;
4923
4924 return (size - bytesReceived * 8) / bandwidth;
4925};
4926
4927exports.estimateSegmentRequestTime = estimateSegmentRequestTime;
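// Worked example (values assumed, not part of the bundle): a 10 second segment
// from a rendition advertising BANDWIDTH of 5e6 bits/sec is estimated at
// 10 * 5e6 = 5e7 bits; at a measured bandwidth of 4e6 bits/sec with 1e6 bytes
// (8e6 bits) already received:
//
//   estimateSegmentRequestTime(10, 4e6, { attributes: { BANDWIDTH: 5e6 } }, 1e6);
//   // => (5e7 - 8e6) / 4e6 = 10.5 seconds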
4928// exports
4929exports['default'] = {
4930 duration: duration,
4931 seekable: seekable,
4932 safeLiveIndex: safeLiveIndex,
4933 getMediaInfoForTime: getMediaInfoForTime,
4934 isEnabled: isEnabled,
4935 isDisabled: isDisabled,
4936 isBlacklisted: isBlacklisted,
4937 isIncompatible: isIncompatible,
4938 playlistEnd: playlistEnd,
4939 isAes: isAes,
4940 isFmp4: isFmp4,
4941 hasAttribute: hasAttribute,
4942 estimateSegmentRequestTime: estimateSegmentRequestTime
4943};
4944}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
4945},{"global/window":32}],12:[function(require,module,exports){
4946(function (global){
4947/**
4948 * ranges
4949 *
4950 * Utilities for working with TimeRanges.
4951 *
4952 */
4953
4954'use strict';
4955
4956Object.defineProperty(exports, '__esModule', {
4957 value: true
4958});
4959
4960var _slicedToArray = (function () { function sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i['return']) _i['return'](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError('Invalid attempt to destructure non-iterable instance'); } }; })();
4961
4962function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
4963
4964var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
4965
4966var _videoJs2 = _interopRequireDefault(_videoJs);
4967
4968// Fudge factor to account for TimeRanges rounding
4969var TIME_FUDGE_FACTOR = 1 / 30;
4970// Comparisons between time values such as current time and the end of the buffered range
4971// can be misleading because of precision differences or when the current media has poorly
4972// aligned audio and video, which can cause values to be slightly off from what you would
4973// expect. This value is what we consider to be safe to use in such comparisons to account
4974// for these scenarios.
4975var SAFE_TIME_DELTA = TIME_FUDGE_FACTOR * 3;
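// Illustrative comparison (values assumed): SAFE_TIME_DELTA is 0.1 seconds, so
// a currentTime of 10.02 is treated as "at" a buffered end of 10.0:
//
//   Math.abs(10.02 - 10.0) < SAFE_TIME_DELTA; // true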
4976
4977/**
4978 * Clamps a value to within a range
4979 * @param {Number} num - the value to clamp
4980 * @param {Number} start - the start of the range to clamp within, inclusive
4981 * @param {Number} end - the end of the range to clamp within, inclusive
4982 * @return {Number}
4983 */
4984var clamp = function clamp(num, _ref) {
4985 var _ref2 = _slicedToArray(_ref, 2);
4986
4987 var start = _ref2[0];
4988 var end = _ref2[1];
4989
4990 return Math.min(Math.max(start, num), end);
4991};
4992var filterRanges = function filterRanges(timeRanges, predicate) {
4993 var results = [];
4994 var i = undefined;
4995
4996 if (timeRanges && timeRanges.length) {
4997 // Search for ranges that match the predicate
4998 for (i = 0; i < timeRanges.length; i++) {
4999 if (predicate(timeRanges.start(i), timeRanges.end(i))) {
5000 results.push([timeRanges.start(i), timeRanges.end(i)]);
5001 }
5002 }
5003 }
5004
5005 return _videoJs2['default'].createTimeRanges(results);
5006};
5007
5008/**
5009 * Attempts to find the buffered TimeRange that contains the specified
5010 * time.
5011 * @param {TimeRanges} buffered - the TimeRanges object to query
5012 * @param {number} time - the time to filter on.
5013 * @returns {TimeRanges} a new TimeRanges object
5014 */
5015var findRange = function findRange(buffered, time) {
5016 return filterRanges(buffered, function (start, end) {
5017 return start - TIME_FUDGE_FACTOR <= time && end + TIME_FUDGE_FACTOR >= time;
5018 });
5019};
5020
5021/**
5022 * Returns the TimeRanges that begin later than the specified time.
5023 * @param {TimeRanges} timeRanges - the TimeRanges object to query
5024 * @param {number} time - the time to filter on.
5025 * @returns {TimeRanges} a new TimeRanges object.
5026 */
5027var findNextRange = function findNextRange(timeRanges, time) {
5028 return filterRanges(timeRanges, function (start) {
5029 return start - TIME_FUDGE_FACTOR >= time;
5030 });
5031};
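// Illustrative calls (buffered ranges assumed; `videojs` is the global helper
// bound above as _videoJs2['default']):
//
//   var buffered = videojs.createTimeRanges([[0, 10], [15, 20]]);
//   findRange(buffered, 16);     // TimeRanges equivalent to [[15, 20]]
//   findNextRange(buffered, 11); // TimeRanges equivalent to [[15, 20]]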
5032
5033/**
5034 * Returns gaps within a list of TimeRanges
5035 * @param {TimeRanges} buffered - the TimeRanges object
5036 * @return {TimeRanges} a TimeRanges object of gaps
5037 */
5038var findGaps = function findGaps(buffered) {
5039 if (buffered.length < 2) {
5040 return _videoJs2['default'].createTimeRanges();
5041 }
5042
5043 var ranges = [];
5044
5045 for (var i = 1; i < buffered.length; i++) {
5046 var start = buffered.end(i - 1);
5047 var end = buffered.start(i);
5048
5049 ranges.push([start, end]);
5050 }
5051
5052 return _videoJs2['default'].createTimeRanges(ranges);
5053};
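// Illustrative call (values assumed): two buffered ranges with a hole between
// them yield that hole as the gap:
//
//   findGaps(videojs.createTimeRanges([[0, 10], [12, 20]]));
//   // TimeRanges equivalent to [[10, 12]]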
5054
5055/**
5056 * Search for a likely end time for the segment that was just appended
5057 * based on the state of the `buffered` property before and after the
5058 * append. If we find only one such uncommon end-point, return it.
5059 * @param {TimeRanges} original - the buffered time ranges before the update
5060 * @param {TimeRanges} update - the buffered time ranges after the update
5061 * @returns {Number|null} the end time added between `original` and `update`,
5062 * or null if one cannot be unambiguously determined.
5063 */
5064var findSoleUncommonTimeRangesEnd = function findSoleUncommonTimeRangesEnd(original, update) {
5065 var i = undefined;
5066 var start = undefined;
5067 var end = undefined;
5068 var result = [];
5069 var edges = [];
5070
5071 // In order to qualify as a possible candidate, the end point must:
5072 // 1) Not have already existed in the `original` ranges
5073 // 2) Not result from the shrinking of a range that already existed
5074 // in the `original` ranges
5075 // 3) Not be contained inside of a range that existed in `original`
5076 var overlapsCurrentEnd = function overlapsCurrentEnd(span) {
5077 return span[0] <= end && span[1] >= end;
5078 };
5079
5080 if (original) {
5081 // Save all the edges in the `original` TimeRanges object
5082 for (i = 0; i < original.length; i++) {
5083 start = original.start(i);
5084 end = original.end(i);
5085
5086 edges.push([start, end]);
5087 }
5088 }
5089
5090 if (update) {
5091 // Save any end-points in `update` that are not in the `original`
5092 // TimeRanges object
5093 for (i = 0; i < update.length; i++) {
5094 start = update.start(i);
5095 end = update.end(i);
5096
5097 if (edges.some(overlapsCurrentEnd)) {
5098 continue;
5099 }
5100
5101 // at this point it must be a unique non-shrinking end edge
5102 result.push(end);
5103 }
5104 }
5105
5106 // we err on the side of caution and return null if we didn't find
5107 // exactly *one* differing end edge in the search above
5108 if (result.length !== 1) {
5109 return null;
5110 }
5111
5112 return result[0];
5113};
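// Illustrative calls (values assumed): growing a single range 0-10 to 0-16
// produces exactly one new end edge, while two new end edges are ambiguous:
//
//   findSoleUncommonTimeRangesEnd(videojs.createTimeRanges([[0, 10]]),
//                                 videojs.createTimeRanges([[0, 16]])); // 16
//   findSoleUncommonTimeRangesEnd(videojs.createTimeRanges([[0, 10]]),
//                                 videojs.createTimeRanges([[0, 16], [20, 30]])); // null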
5114
5115/**
5116 * Calculate the intersection of two TimeRanges
5117 * @param {TimeRanges} bufferA
5118 * @param {TimeRanges} bufferB
5119 * @returns {TimeRanges} The intersection of `bufferA` with `bufferB`
5120 */
5121var bufferIntersection = function bufferIntersection(bufferA, bufferB) {
5122 var start = null;
5123 var end = null;
5124 var arity = 0;
5125 var extents = [];
5126 var ranges = [];
5127
5128 if (!bufferA || !bufferA.length || !bufferB || !bufferB.length) {
5129 return _videoJs2['default'].createTimeRange();
5130 }
5131
5132 // Handle the case where we have both buffers and create an
5133 // intersection of the two
5134 var count = bufferA.length;
5135
5136 // A) Gather up all start and end times
5137 while (count--) {
5138 extents.push({ time: bufferA.start(count), type: 'start' });
5139 extents.push({ time: bufferA.end(count), type: 'end' });
5140 }
5141 count = bufferB.length;
5142 while (count--) {
5143 extents.push({ time: bufferB.start(count), type: 'start' });
5144 extents.push({ time: bufferB.end(count), type: 'end' });
5145 }
5146 // B) Sort them by time
5147 extents.sort(function (a, b) {
5148 return a.time - b.time;
5149 });
5150
5151 // C) Go along one by one incrementing arity for start and decrementing
5152 // arity for ends
5153 for (count = 0; count < extents.length; count++) {
5154 if (extents[count].type === 'start') {
5155 arity++;
5156
5157 // D) If arity is ever incremented to 2 we are entering an
5158 // overlapping range
5159 if (arity === 2) {
5160 start = extents[count].time;
5161 }
5162 } else if (extents[count].type === 'end') {
5163 arity--;
5164
5165 // E) If arity is ever decremented to 1 we are leaving an
5166 // overlapping range
5167 if (arity === 1) {
5168 end = extents[count].time;
5169 }
5170 }
5171
5172 // F) Record overlapping ranges
5173 if (start !== null && end !== null) {
5174 ranges.push([start, end]);
5175 start = null;
5176 end = null;
5177 }
5178 }
5179
5180 return _videoJs2['default'].createTimeRanges(ranges);
5181};
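// Worked example of the sweep above (values assumed): sorting the extents of
// A = [[0, 5], [8, 10]] and B = [[3, 9]], arity reaches 2 at times 3 and 8 and
// falls back to 1 at times 5 and 9:
//
//   bufferIntersection(videojs.createTimeRanges([[0, 5], [8, 10]]),
//                      videojs.createTimeRanges([[3, 9]]));
//   // TimeRanges equivalent to [[3, 5], [8, 9]]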
5182
5183/**
5184 * Calculates the percentage of `adjustedRange` that overlaps the
5185 * `buffered` time ranges.
5186 * @param {TimeRanges} adjustedRange - the time range that the segment
5187 * covers adjusted according to currentTime
5188 * @param {TimeRanges} referenceRange - the original time range that the
5189 * segment covers
5190 * @param {Number} currentTime - time in seconds where the current playback
5191 * is at
5192 * @param {TimeRanges} buffered - the currently buffered time ranges
5193 * @returns {Number} percent of the segment currently buffered
5194 */
5195var calculateBufferedPercent = function calculateBufferedPercent(adjustedRange, referenceRange, currentTime, buffered) {
5196 var referenceDuration = referenceRange.end(0) - referenceRange.start(0);
5197 var adjustedDuration = adjustedRange.end(0) - adjustedRange.start(0);
5198 var bufferMissingFromAdjusted = referenceDuration - adjustedDuration;
5199 var adjustedIntersection = bufferIntersection(adjustedRange, buffered);
5200 var referenceIntersection = bufferIntersection(referenceRange, buffered);
5201 var adjustedOverlap = 0;
5202 var referenceOverlap = 0;
5203
5204 var count = adjustedIntersection.length;
5205
5206 while (count--) {
5207 adjustedOverlap += adjustedIntersection.end(count) - adjustedIntersection.start(count);
5208
5209 // If the current overlap segment starts at currentTime, then increase the
5210 // overlap duration so that it actually starts at the beginning of referenceRange
5211 // by including the difference between the two ranges' durations.
5212 // This is a workaround for the way Flash has no buffer before currentTime
5213 if (adjustedIntersection.start(count) === currentTime) {
5214 adjustedOverlap += bufferMissingFromAdjusted;
5215 }
5216 }
5217
5218 count = referenceIntersection.length;
5219
5220 while (count--) {
5221 referenceOverlap += referenceIntersection.end(count) - referenceIntersection.start(count);
5222 }
5223
5224 // Use whichever value is larger for the percentage-buffered since that value
5225 // is likely more accurate (the adjusted overlap can only exceed the reference overlap via the Flash workaround above)
5226 return Math.max(adjustedOverlap, referenceOverlap) / referenceDuration * 100;
5227};
5228
5229/**
5230 * Return how much of the range specified by startOfSegment and segmentDuration
5231 * overlaps the current buffered content.
5232 *
5233 * @param {Number} startOfSegment - the time where the segment begins
5234 * @param {Number} segmentDuration - the duration of the segment in seconds
5235 * @param {Number} currentTime - time in seconds where the current playback
5236 * is at
5237 * @param {TimeRanges} buffered - the state of the buffer
5238 * @returns {Number} percentage of the segment's time range that is
5239 * already in `buffered`
5240 */
5241var getSegmentBufferedPercent = function getSegmentBufferedPercent(startOfSegment, segmentDuration, currentTime, buffered) {
5242 var endOfSegment = startOfSegment + segmentDuration;
5243
5244 // The entire time range of the segment
5245 var originalSegmentRange = _videoJs2['default'].createTimeRanges([[startOfSegment, endOfSegment]]);
5246
5247 // The adjusted segment time range that is setup such that it starts
5248 // no earlier than currentTime
5249 // Flash has no notion of a back-buffer so adjustedSegmentRange adjusts
5250 // for that, and the function will still return 100% if only half of a
5251 // segment is actually in the buffer as long as the currentTime is also
5252 // half-way through the segment
5253 var adjustedSegmentRange = _videoJs2['default'].createTimeRanges([[clamp(startOfSegment, [currentTime, endOfSegment]), endOfSegment]]);
5254
5255 // This condition happens when the currentTime is beyond the segment's
5256 // end time
5257 if (adjustedSegmentRange.start(0) === adjustedSegmentRange.end(0)) {
5258 return 0;
5259 }
5260
5261 var percent = calculateBufferedPercent(adjustedSegmentRange, originalSegmentRange, currentTime, buffered);
5262
5263 // If the segment is reported as having a zero duration, return 0%
5264 // since it is likely that we will need to fetch the segment
5265 if (isNaN(percent) || percent === Infinity || percent === -Infinity) {
5266 return 0;
5267 }
5268
5269 return percent;
5270};
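// Illustrative call (values assumed): a 10 second segment starting at 0, with
// playback at 5 and only 5-10 buffered, still reports 100% because the
// adjusted range compensates for Flash's missing back-buffer:
//
//   getSegmentBufferedPercent(0, 10, 5, videojs.createTimeRanges([[5, 10]]));
//   // => 100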
5271
5272/**
5273 * Gets a human readable string for a TimeRange
5274 *
5275 * @param {TimeRange} range
5276 * @returns {String} a human readable string
5277 */
5278var printableRange = function printableRange(range) {
5279 var strArr = [];
5280
5281 if (!range || !range.length) {
5282 return '';
5283 }
5284
5285 for (var i = 0; i < range.length; i++) {
5286 strArr.push(range.start(i) + ' => ' + range.end(i));
5287 }
5288
5289 return strArr.join(', ');
5290};
5291
5292/**
5293 * Calculates the amount of time left in seconds until the player hits the end of the
5294 * buffer and causes a rebuffer
5295 *
5296 * @param {TimeRange} buffered
5297 * The state of the buffer
5298 * @param {Number} currentTime
5299 * The current time of the player
5300 * @param {Number} playbackRate
5301 * The current playback rate of the player. Defaults to 1.
5302 * @return {Number}
5303 * Time until the player has to start rebuffering in seconds.
5304 * @function timeUntilRebuffer
5305 */
5306var timeUntilRebuffer = function timeUntilRebuffer(buffered, currentTime) {
5307 var playbackRate = arguments.length <= 2 || arguments[2] === undefined ? 1 : arguments[2];
5308
5309 var bufferedEnd = buffered.length ? buffered.end(buffered.length - 1) : 0;
5310
5311 return (bufferedEnd - currentTime) / playbackRate;
5312};
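// Worked example (values assumed): the buffer ends at 30, playback is at 25,
// and the playback rate is 2x:
//
//   timeUntilRebuffer(videojs.createTimeRanges([[0, 30]]), 25, 2);
//   // => (30 - 25) / 2 = 2.5 seconds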
5313
5314exports['default'] = {
5315 findRange: findRange,
5316 findNextRange: findNextRange,
5317 findGaps: findGaps,
5318 findSoleUncommonTimeRangesEnd: findSoleUncommonTimeRangesEnd,
5319 getSegmentBufferedPercent: getSegmentBufferedPercent,
5320 TIME_FUDGE_FACTOR: TIME_FUDGE_FACTOR,
5321 SAFE_TIME_DELTA: SAFE_TIME_DELTA,
5322 printableRange: printableRange,
5323 timeUntilRebuffer: timeUntilRebuffer
5324};
5325module.exports = exports['default'];
5326}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
5327},{}],13:[function(require,module,exports){
5328(function (global){
5329'use strict';
5330
5331Object.defineProperty(exports, '__esModule', {
5332 value: true
5333});
5334
5335function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
5336
5337var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
5338
5339var _videoJs2 = _interopRequireDefault(_videoJs);
5340
5341var defaultOptions = {
5342 errorInterval: 30,
5343 getSource: function getSource(next) {
5344 var tech = this.tech({ IWillNotUseThisInPlugins: true });
5345 var sourceObj = tech.currentSource_;
5346
5347 return next(sourceObj);
5348 }
5349};
5350
5351/**
5352 * Main entry point for the plugin
5353 *
5354 * @param {Player} player a reference to a videojs Player instance
5355 * @param {Object} [options] an object with plugin options
5356 * @private
5357 */
5358var initPlugin = function initPlugin(player, options) {
5359 var lastCalled = 0;
5360 var seekTo = 0;
5361 var localOptions = _videoJs2['default'].mergeOptions(defaultOptions, options);
5362
5363 player.ready(function () {
5364 player.trigger({ type: 'usage', name: 'hls-error-reload-initialized' });
5365 });
5366
5367 /**
5368 * Player modifications to perform that must wait until `loadedmetadata`
5369 * has been triggered
5370 *
5371 * @private
5372 */
5373 var loadedMetadataHandler = function loadedMetadataHandler() {
5374 if (seekTo) {
5375 player.currentTime(seekTo);
5376 }
5377 };
5378
5379 /**
5380 * Set the source on the player element, play, and seek if necessary
5381 *
5382 * @param {Object} sourceObj An object specifying the source url and mime-type to play
5383 * @private
5384 */
5385 var setSource = function setSource(sourceObj) {
5386 if (sourceObj === null || sourceObj === undefined) {
5387 return;
5388 }
5389 seekTo = player.duration() !== Infinity && player.currentTime() || 0;
5390
5391 player.one('loadedmetadata', loadedMetadataHandler);
5392
5393 player.src(sourceObj);
5394 player.trigger({ type: 'usage', name: 'hls-error-reload' });
5395 player.play();
5396 };
5397
5398 /**
5399 * Attempt to get a source from either the built-in getSource function
5400 * or a custom function provided via the options
5401 *
5402 * @private
5403 */
5404 var errorHandler = function errorHandler() {
5405 // Do not attempt to reload the source if a source-reload occurred before
5406 // 'errorInterval' time has elapsed since the last source-reload
5407 if (Date.now() - lastCalled < localOptions.errorInterval * 1000) {
5408 player.trigger({ type: 'usage', name: 'hls-error-reload-canceled' });
5409 return;
5410 }
5411
5412 if (!localOptions.getSource || typeof localOptions.getSource !== 'function') {
5413 _videoJs2['default'].log.error('ERROR: reloadSourceOnError - The option getSource must be a function!');
5414 return;
5415 }
5416 lastCalled = Date.now();
5417
5418 return localOptions.getSource.call(player, setSource);
5419 };
5420
5421 /**
5422 * Unbind any event handlers that were bound by the plugin
5423 *
5424 * @private
5425 */
5426 var cleanupEvents = function cleanupEvents() {
5427 player.off('loadedmetadata', loadedMetadataHandler);
5428 player.off('error', errorHandler);
5429 player.off('dispose', cleanupEvents);
5430 };
5431
5432 /**
5433 * Cleanup before re-initializing the plugin
5434 *
5435 * @param {Object} [newOptions] an object with plugin options
5436 * @private
5437 */
5438 var reinitPlugin = function reinitPlugin(newOptions) {
5439 cleanupEvents();
5440 initPlugin(player, newOptions);
5441 };
5442
5443 player.on('error', errorHandler);
5444 player.on('dispose', cleanupEvents);
5445
5446 // Overwrite the plugin function so that we can correctly clean up before
5447 // re-initializing the plugin
5448 player.reloadSourceOnError = reinitPlugin;
5449};
5450
5451/**
5452 * Reload the source when an error is detected as long as there
5453 * wasn't an error previously within the last 30 seconds
5454 *
5455 * @param {Object} [options] an object with plugin options
5456 */
5457var reloadSourceOnError = function reloadSourceOnError(options) {
5458 initPlugin(this, options);
5459};
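// Illustrative plugin usage (belongs in player setup code, not in this bundle;
// the source URL is a placeholder):
//
//   player.reloadSourceOnError({
//     // wait at least 10 seconds between reload attempts
//     errorInterval: 10,
//     // optionally supply the next source asynchronously
//     getSource: function(reload) {
//       reload({ src: 'http://example.com/index.m3u8', type: 'application/x-mpegURL' });
//     }
//   });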
5460
5461exports['default'] = reloadSourceOnError;
5462module.exports = exports['default'];
5463}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
5464},{}],14:[function(require,module,exports){
5465'use strict';
5466
5467Object.defineProperty(exports, '__esModule', {
5468 value: true
5469});
5470
5471function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
5472
5473var _playlistJs = require('./playlist.js');
5474
5475/**
5476 * Returns a function that acts as the enable/disable playlist function.
5477 *
5478 * @param {PlaylistLoader} loader - The master playlist loader
5479 * @param {String} playlistUri - uri of the playlist
5480 * @param {Function} changePlaylistFn - A function to be called after a
5481 * playlist's enabled-state has been changed. Will NOT be called if a
5482 * playlist's enabled-state is unchanged
5483 * @param {Boolean=} enable - Value to set the playlist enabled-state to
5484 * or if undefined returns the current enabled-state for the playlist
5485 * @return {Function} Function for setting/getting enabled
5486 */
5487var enableFunction = function enableFunction(loader, playlistUri, changePlaylistFn) {
5488 return function (enable) {
5489 var playlist = loader.master.playlists[playlistUri];
5490 var incompatible = (0, _playlistJs.isIncompatible)(playlist);
5491 var currentlyEnabled = (0, _playlistJs.isEnabled)(playlist);
5492
5493 if (typeof enable === 'undefined') {
5494 return currentlyEnabled;
5495 }
5496
5497 if (enable) {
5498 delete playlist.disabled;
5499 } else {
5500 playlist.disabled = true;
5501 }
5502
5503 if (enable !== currentlyEnabled && !incompatible) {
5504 // Ensure the outside world knows about our changes
5505 changePlaylistFn();
5506 if (enable) {
5507 loader.trigger('renditionenabled');
5508 } else {
5509 loader.trigger('renditiondisabled');
5510 }
5511 }
5512 return enable;
5513 };
5514};
5515
5516/**
5517 * The representation object encapsulates the publicly visible information
5518 * in a media playlist along with a setter/getter-type function (enabled)
5519 * for changing the enabled-state of a particular playlist entry
5520 *
5521 * @class Representation
5522 */
5523
5524var Representation = function Representation(hlsHandler, playlist, id) {
5525 _classCallCheck(this, Representation);
5526
5527 // Get a reference to a bound version of fastQualityChange_
5528 var fastChangeFunction = hlsHandler.masterPlaylistController_.fastQualityChange_.bind(hlsHandler.masterPlaylistController_);
5529
5530 // some playlist attributes are optional
5531 if (playlist.attributes.RESOLUTION) {
5532 var resolution = playlist.attributes.RESOLUTION;
5533
5534 this.width = resolution.width;
5535 this.height = resolution.height;
5536 }
5537
5538 this.bandwidth = playlist.attributes.BANDWIDTH;
5539
5540 // The id is simply the ordinality of the media playlist
5541 // within the master playlist
5542 this.id = id;
5543
5544 // Partially-apply the enableFunction to create a playlist-
5545 // specific variant
5546 this.enabled = enableFunction(hlsHandler.playlists, playlist.uri, fastChangeFunction);
5547};
5548
5549/**
5550 * A mixin function that adds the `representations` api to an instance
5551 * of the HlsHandler class
5552 * @param {HlsHandler} hlsHandler - An instance of HlsHandler to add the
5553 * representation API into
5554 */
5555
5556
5557var renditionSelectionMixin = function renditionSelectionMixin(hlsHandler) {
5558 var playlists = hlsHandler.playlists;
5559
5560 // Add a single API-specific function to the HlsHandler instance
5561 hlsHandler.representations = function () {
5562 return playlists.master.playlists.filter(function (media) {
5563 return !(0, _playlistJs.isIncompatible)(media);
5564 }).map(function (e, i) {
5565 return new Representation(hlsHandler, e, e.uri);
5566 });
5567 };
5568};
5569
5570exports['default'] = renditionSelectionMixin;
5571module.exports = exports['default'];
5572},{"./playlist.js":11}],15:[function(require,module,exports){
5573/**
5574 * @file resolve-url.js
5575 */
5576
5577'use strict';
5578
5579Object.defineProperty(exports, '__esModule', {
5580 value: true
5581});
5582
5583function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
5584
5585var _urlToolkit = require('url-toolkit');
5586
5587var _urlToolkit2 = _interopRequireDefault(_urlToolkit);
5588
5589var _globalWindow = require('global/window');
5590
5591var _globalWindow2 = _interopRequireDefault(_globalWindow);
5592
5593var resolveUrl = function resolveUrl(baseURL, relativeURL) {
5594 // return early if we don't need to resolve
5595 if (/^[a-z]+:/i.test(relativeURL)) {
5596 return relativeURL;
5597 }
5598
5599 // if the base URL is relative then combine with the current location
5600 if (!/\/\//i.test(baseURL)) {
5601 baseURL = _urlToolkit2['default'].buildAbsoluteURL(_globalWindow2['default'].location.href, baseURL);
5602 }
5603
5604 return _urlToolkit2['default'].buildAbsoluteURL(baseURL, relativeURL);
5605};
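// Illustrative calls (URLs are placeholders):
//
//   resolveUrl('http://example.com/hls/master.m3u8', 'media/playlist.m3u8');
//   // => 'http://example.com/hls/media/playlist.m3u8'
//   resolveUrl('http://example.com/hls/master.m3u8', 'https://cdn.example.com/seg.ts');
//   // => 'https://cdn.example.com/seg.ts' (already absolute, returned early)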
5606
5607exports['default'] = resolveUrl;
5608module.exports = exports['default'];
5609},{"global/window":32,"url-toolkit":63}],16:[function(require,module,exports){
5610(function (global){
5611/**
5612 * @file segment-loader.js
5613 */
5614'use strict';
5615
5616Object.defineProperty(exports, '__esModule', {
5617 value: true
5618});
5619
5620var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
5621
5622var _get = function get(_x4, _x5, _x6) { var _again = true; _function: while (_again) { var object = _x4, property = _x5, receiver = _x6; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x4 = parent; _x5 = property; _x6 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
5623
5624function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
5625
5626function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
5627
5628function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
5629
5630var _playlist = require('./playlist');
5631
5632var _playlist2 = _interopRequireDefault(_playlist);
5633
5634var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
5635
5636var _videoJs2 = _interopRequireDefault(_videoJs);
5637
5638var _sourceUpdater = require('./source-updater');
5639
5640var _sourceUpdater2 = _interopRequireDefault(_sourceUpdater);
5641
5642var _config = require('./config');
5643
5644var _config2 = _interopRequireDefault(_config);
5645
5646var _globalWindow = require('global/window');
5647
5648var _globalWindow2 = _interopRequireDefault(_globalWindow);
5649
5650var _videojsContribMediaSourcesEs5RemoveCuesFromTrackJs = require('videojs-contrib-media-sources/es5/remove-cues-from-track.js');
5651
5652var _videojsContribMediaSourcesEs5RemoveCuesFromTrackJs2 = _interopRequireDefault(_videojsContribMediaSourcesEs5RemoveCuesFromTrackJs);
5653
5654var _binUtils = require('./bin-utils');
5655
5656var _mediaSegmentRequest = require('./media-segment-request');
5657
5658var _ranges = require('./ranges');
5659
5660var _playlistSelectors = require('./playlist-selectors');
5661
5662// in ms
5663var CHECK_BUFFER_DELAY = 500;
5664
5665/**
5666 * Determines if we should call endOfStream on the media source based
5667 * on the state of the buffer or if the appended segment was the final
5668 * segment in the playlist.
5669 *
5670 * @param {Object} playlist a media playlist object
5671 * @param {Object} mediaSource the MediaSource object
5672 * @param {Number} segmentIndex the index of segment we last appended
5673 * @returns {Boolean} do we need to call endOfStream on the MediaSource
5674 */
5675var detectEndOfStream = function detectEndOfStream(playlist, mediaSource, segmentIndex) {
5676 if (!playlist || !mediaSource) {
5677 return false;
5678 }
5679
5680 var segments = playlist.segments;
5681
5682 // determine a few boolean values to help make the branch below easier
5683 // to read
5684 var appendedLastSegment = segmentIndex === segments.length;
5685
5686 // if we've buffered to the end of the video, we need to call endOfStream
5687 // so that MediaSources can trigger the `ended` event when it runs out of
5688 // buffered data instead of waiting for us
5689 return playlist.endList && mediaSource.readyState === 'open' && appendedLastSegment;
5690};
5691
5692var finite = function finite(num) {
5693 return typeof num === 'number' && isFinite(num);
5694};
5695
5696var illegalMediaSwitch = function illegalMediaSwitch(loaderType, startingMedia, newSegmentMedia) {
5697 // Although these checks should most likely cover non-'main' types too, for now this narrows
5698 // the scope of our checks.
5699 if (loaderType !== 'main' || !startingMedia || !newSegmentMedia) {
5700 return null;
5701 }
5702
5703 if (!newSegmentMedia.containsAudio && !newSegmentMedia.containsVideo) {
5704 return 'Neither audio nor video found in segment.';
5705 }
5706
5707 if (startingMedia.containsVideo && !newSegmentMedia.containsVideo) {
5708 return 'Only audio found in segment when we expected video.' + ' We can\'t switch to audio only from a stream that had video.' + ' To get rid of this message, please add codec information to the manifest.';
5709 }
5710
5711 if (!startingMedia.containsVideo && newSegmentMedia.containsVideo) {
5712 return 'Video found in segment when we expected only audio.' + ' We can\'t switch to a stream with video from an audio only stream.' + ' To get rid of this message, please add codec information to the manifest.';
5713 }
5714
5715 return null;
5716};
5717
5718exports.illegalMediaSwitch = illegalMediaSwitch;
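// Illustrative calls (media-info shapes assumed): dropping video mid-stream is
// flagged, and non-'main' loader types short-circuit to null:
//
//   illegalMediaSwitch('main',
//                      { containsAudio: true, containsVideo: true },
//                      { containsAudio: true, containsVideo: false });
//   // => 'Only audio found in segment when we expected video. ...'
//   illegalMediaSwitch('audio',
//                      { containsAudio: true, containsVideo: false },
//                      { containsAudio: true, containsVideo: false }); // null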
5719/**
5720 * Calculates a time value that is safe to remove from the back buffer without interrupting
5721 * playback.
5722 *
5723 * @param {TimeRange} seekable
5724 * The current seekable range
5725 * @param {Number} currentTime
5726 * The current time of the player
5727 * @param {Number} targetDuration
5728 * The target duration of the current playlist
5729 * @return {Number}
5730 * Time that is safe to remove from the back buffer without interrupting playback
5731 */
5732var safeBackBufferTrimTime = function safeBackBufferTrimTime(seekable, currentTime, targetDuration) {
5733 var removeToTime = undefined;
5734
5735 if (seekable.length && seekable.start(0) > 0 && seekable.start(0) < currentTime) {
5736 // If we have a seekable range use that as the limit for what can be removed safely
5737 removeToTime = seekable.start(0);
5738 } else {
5739 // otherwise remove anything older than 30 seconds before the current play head
5740 removeToTime = currentTime - 30;
5741 }
5742
5743 // Don't allow removing from the buffer within target duration of current time
5744 // to avoid the possibility of removing the GOP currently being played which could
5745 // cause playback stalls.
5746 return Math.min(removeToTime, currentTime - targetDuration);
5747};
5748
5749exports.safeBackBufferTrimTime = safeBackBufferTrimTime;
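// Worked example (values assumed): a live seekable window starting at 100,
// playback at 130, and a 10 second target duration:
//
//   safeBackBufferTrimTime(videojs.createTimeRanges([[100, 200]]), 130, 10);
//   // => Math.min(100, 130 - 10) = 100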
5750/**
5751 * An object that manages segment loading and appending.
5752 *
5753 * @class SegmentLoader
5754 * @param {Object} options required and optional options
5755 * @extends videojs.EventTarget
5756 */
5757
5758var SegmentLoader = (function (_videojs$EventTarget) {
5759 _inherits(SegmentLoader, _videojs$EventTarget);
5760
5761 function SegmentLoader(settings) {
5762 var _this = this;
5763
5764 var options = arguments.length <= 1 || arguments[1] === undefined ? {} : arguments[1];
5765
5766 _classCallCheck(this, SegmentLoader);
5767
5768 _get(Object.getPrototypeOf(SegmentLoader.prototype), 'constructor', this).call(this);
5769 // check pre-conditions
5770 if (!settings) {
5771 throw new TypeError('Initialization settings are required');
5772 }
5773 if (typeof settings.currentTime !== 'function') {
5774 throw new TypeError('No currentTime getter specified');
5775 }
5776 if (!settings.mediaSource) {
5777 throw new TypeError('No MediaSource specified');
5778 }
5779 // public properties
5780 this.state = 'INIT';
5781 this.bandwidth = settings.bandwidth;
5782 this.throughput = { rate: 0, count: 0 };
5783 this.roundTrip = NaN;
5784 this.resetStats_();
5785 this.mediaIndex = null;
5786
5787 // private settings
5788 this.hasPlayed_ = settings.hasPlayed;
5789 this.currentTime_ = settings.currentTime;
5790 this.seekable_ = settings.seekable;
5791 this.seeking_ = settings.seeking;
5792 this.duration_ = settings.duration;
5793 this.mediaSource_ = settings.mediaSource;
5794 this.hls_ = settings.hls;
5795 this.loaderType_ = settings.loaderType;
5796 this.startingMedia_ = void 0;
5797 this.segmentMetadataTrack_ = settings.segmentMetadataTrack;
5798 this.goalBufferLength_ = settings.goalBufferLength;
5799
5800 // private instance variables
5801 this.checkBufferTimeout_ = null;
5802 this.error_ = void 0;
5803 this.currentTimeline_ = -1;
5804 this.pendingSegment_ = null;
5805 this.mimeType_ = null;
5806 this.sourceUpdater_ = null;
5807 this.xhrOptions_ = null;
5808
5809 // Fragmented mp4 playback
5810 this.activeInitSegmentId_ = null;
5811 this.initSegments_ = {};
5812
5813 this.decrypter_ = settings.decrypter;
5814
5815 // Manages the tracking and generation of sync-points, mappings
5816 // between a time in the display time and a segment index within
5817 // a playlist
5818 this.syncController_ = settings.syncController;
5819 this.syncPoint_ = {
5820 segmentIndex: 0,
5821 time: 0
5822 };
5823
5824 this.syncController_.on('syncinfoupdate', function () {
5825 return _this.trigger('syncinfoupdate');
5826 });
5827
5828 this.mediaSource_.addEventListener('sourceopen', function () {
5829 return _this.ended_ = false;
5830 });
5831
5832 // ...for determining the fetch location
5833 this.fetchAtBuffer_ = false;
5834
5835 if (options.debug) {
5836 this.logger_ = _videoJs2['default'].log.bind(_videoJs2['default'], 'segment-loader', this.loaderType_, '->');
5837 }
5838 }
5839
5840 /**
5841 * reset all of our media stats
5842 *
5843 * @private
5844 */
5845
5846 _createClass(SegmentLoader, [{
5847 key: 'resetStats_',
5848 value: function resetStats_() {
5849 this.mediaBytesTransferred = 0;
5850 this.mediaRequests = 0;
5851 this.mediaRequestsAborted = 0;
5852 this.mediaRequestsTimedout = 0;
5853 this.mediaRequestsErrored = 0;
5854 this.mediaTransferDuration = 0;
5855 this.mediaSecondsLoaded = 0;
5856 }
5857
5858 /**
5859 * dispose of the SegmentLoader and reset to the default state
5860 */
5861 }, {
5862 key: 'dispose',
5863 value: function dispose() {
5864 this.state = 'DISPOSED';
5865 this.pause();
5866 this.abort_();
5867 if (this.sourceUpdater_) {
5868 this.sourceUpdater_.dispose();
5869 }
5870 this.resetStats_();
5871 }
5872
5873 /**
5874 * abort anything that is currently going on with the SegmentLoader
5875 * and reset to a default state
5876 */
5877 }, {
5878 key: 'abort',
5879 value: function abort() {
5880 if (this.state !== 'WAITING') {
5881 if (this.pendingSegment_) {
5882 this.pendingSegment_ = null;
5883 }
5884 return;
5885 }
5886
5887 this.abort_();
5888
5889 // We aborted the requests we were waiting on, so reset the loader's state to READY
5890 // since we are no longer "waiting" on any requests. XHR callback is not always run
5891 // when the request is aborted. This will prevent the loader from being stuck in the
5892 // WAITING state indefinitely.
5893 this.state = 'READY';
5894
5895 // don't wait for buffer check timeouts to begin fetching the
5896 // next segment
5897 if (!this.paused()) {
5898 this.monitorBuffer_();
5899 }
5900 }
5901
5902 /**
5903 * abort all pending xhr requests and null any pending segments
5904 *
5905 * @private
5906 */
5907 }, {
5908 key: 'abort_',
5909 value: function abort_() {
5910 if (this.pendingSegment_) {
5911 this.pendingSegment_.abortRequests();
5912 }
5913
5914 // clear out the segment being processed
5915 this.pendingSegment_ = null;
5916 }
5917
5918 /**
5919 * set an error on the segment loader and null out any pending segments
5920 *
5921 * @param {Error} error the error to set on the SegmentLoader
5922 * @return {Error} the error that was set or that is currently set
5923 */
5924 }, {
5925 key: 'error',
5926 value: function error(_error) {
5927 if (typeof _error !== 'undefined') {
5928 this.error_ = _error;
5929 }
5930
5931 this.pendingSegment_ = null;
5932 return this.error_;
5933 }
5934 }, {
5935 key: 'endOfStream',
5936 value: function endOfStream() {
5937 this.ended_ = true;
5938 this.pause();
5939 this.trigger('ended');
5940 }
5941
5942 /**
5943 * Indicates which time ranges are buffered
5944 *
5945 * @return {TimeRange}
5946 * TimeRange object representing the current buffered ranges
5947 */
5948 }, {
5949 key: 'buffered_',
5950 value: function buffered_() {
5951 if (!this.sourceUpdater_) {
5952 return _videoJs2['default'].createTimeRanges();
5953 }
5954
5955 return this.sourceUpdater_.buffered();
5956 }
5957
5958 /**
5959 * Gets and sets init segment for the provided map
5960 *
5961 * @param {Object} map
5962 * The map object representing the init segment to get or set
5963 * @param {Boolean=} set
5964 * If true, the init segment for the provided map should be saved
5965 * @return {Object}
5966 * map object for desired init segment
5967 */
5968 }, {
5969 key: 'initSegment',
5970 value: function initSegment(map) {
5971 var set = arguments.length <= 1 || arguments[1] === undefined ? false : arguments[1];
5972
5973 if (!map) {
5974 return null;
5975 }
5976
5977 var id = (0, _binUtils.initSegmentId)(map);
5978 var storedMap = this.initSegments_[id];
5979
5980 if (set && !storedMap && map.bytes) {
5981 this.initSegments_[id] = storedMap = {
5982 resolvedUri: map.resolvedUri,
5983 byterange: map.byterange,
5984 bytes: map.bytes
5985 };
5986 }
5987
5988 return storedMap || map;
5989 }
5990
5991 /**
5992 * Returns true if all configuration required for loading is present, otherwise false.
5993 *
5994 * @return {Boolean} True if all the configuration is ready for loading
5995 * @private
5996 */
5997 }, {
5998 key: 'couldBeginLoading_',
5999 value: function couldBeginLoading_() {
6000 return this.playlist_ && (
6001 // the source updater is created when init_ is called, so either having a
6002 // source updater or being in the INIT state with a mimeType is enough
6003 // to say we have all the needed configuration to start loading.
6004 this.sourceUpdater_ || this.mimeType_ && this.state === 'INIT') && !this.paused();
6005 }
6006
6007 /**
6008 * load a playlist and start to fill the buffer
6009 */
6010 }, {
6011 key: 'load',
6012 value: function load() {
6013 // un-pause
6014 this.monitorBuffer_();
6015
6016 // if we don't have a playlist yet, keep waiting for one to be
6017 // specified
6018 if (!this.playlist_) {
6019 return;
6020 }
6021
6022 // not sure if this is the best place for this
6023 this.syncController_.setDateTimeMapping(this.playlist_);
6024
6025 // if all the configuration is ready, initialize and begin loading
6026 if (this.state === 'INIT' && this.couldBeginLoading_()) {
6027 return this.init_();
6028 }
6029
6030 // if we're in the middle of processing a segment already, don't
6031 // kick off an additional segment request
6032 if (!this.couldBeginLoading_() || this.state !== 'READY' && this.state !== 'INIT') {
6033 return;
6034 }
6035
6036 this.state = 'READY';
6037 }
6038
6039 /**
6040 * Once all the starting parameters have been specified, begin
6041 * operation. This method should only be invoked from the INIT
6042 * state.
6043 *
6044 * @private
6045 */
6046 }, {
6047 key: 'init_',
6048 value: function init_() {
6049 this.state = 'READY';
6050 this.sourceUpdater_ = new _sourceUpdater2['default'](this.mediaSource_, this.mimeType_);
6051 this.resetEverything();
6052 return this.monitorBuffer_();
6053 }
6054
6055 /**
6056 * set a playlist on the segment loader
6057 *
6058 * @param {PlaylistLoader} media the playlist to set on the segment loader
6059 */
6060 }, {
6061 key: 'playlist',
6062 value: function playlist(newPlaylist) {
6063 var options = arguments.length <= 1 || arguments[1] === undefined ? {} : arguments[1];
6064
6065 if (!newPlaylist) {
6066 return;
6067 }
6068
6069 var oldPlaylist = this.playlist_;
6070 var segmentInfo = this.pendingSegment_;
6071
6072 this.playlist_ = newPlaylist;
6073 this.xhrOptions_ = options;
6074
6075 // when we haven't started playing yet, the start of a live playlist
6076 // is always our zero-time so force a sync update each time the playlist
6077 // is refreshed from the server
6078 if (!this.hasPlayed_()) {
6079 newPlaylist.syncInfo = {
6080 mediaSequence: newPlaylist.mediaSequence,
6081 time: 0
6082 };
6083 }
6084
6085 // in VOD, this is always a rendition switch (or we updated our syncInfo above)
6086 // in LIVE, we always want to update with new playlists (including refreshes)
6087 this.trigger('syncinfoupdate');
6088
6089 // if we were unpaused but waiting for a playlist, start
6090 // buffering now
6091 if (this.state === 'INIT' && this.couldBeginLoading_()) {
6092 return this.init_();
6093 }
6094
6095 if (!oldPlaylist || oldPlaylist.uri !== newPlaylist.uri) {
6096 if (this.mediaIndex !== null) {
6097 // we must "resync" the segment loader when we switch renditions and
6098 // the segment loader is already synced to the previous rendition
6099 this.resyncLoader();
6100 }
6101
6102 // the rest of this function depends on `oldPlaylist` being defined
6103 return;
6104 }
6105
6106 // we reloaded the same playlist so we are in a live scenario
6107 // and we will likely need to adjust the mediaIndex
6108 var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;
6109
6110 this.logger_('mediaSequenceDiff', mediaSequenceDiff);
6111
6112 // update the mediaIndex on the SegmentLoader
6113 // this is important because we can abort a request and this value must be
6114 // equal to the last appended mediaIndex
6115 if (this.mediaIndex !== null) {
6116 this.mediaIndex -= mediaSequenceDiff;
6117 }
6118
6119 // update the mediaIndex on the SegmentInfo object
6120 // this is important because we will update this.mediaIndex with this value
6121 // in `handleUpdateEnd_` after the segment has been successfully appended
6122 if (segmentInfo) {
6123 segmentInfo.mediaIndex -= mediaSequenceDiff;
6124
6125 // we need to update the referenced segment so that timing information is
6126 // saved for the new playlist's segment, however, if the segment fell off the
6127 // playlist, we can leave the old reference and just lose the timing info
6128 if (segmentInfo.mediaIndex >= 0) {
6129 segmentInfo.segment = newPlaylist.segments[segmentInfo.mediaIndex];
6130 }
6131 }
6132
6133 this.syncController_.saveExpiredSegmentInfo(oldPlaylist, newPlaylist);
6134 }
6135
6136 /**
6137 * Prevent the loader from fetching additional segments. If there
6138 * is a segment request outstanding, it will finish processing
6139 * before the loader halts. A segment loader can be unpaused by
6140 * calling load().
6141 */
6142 }, {
6143 key: 'pause',
6144 value: function pause() {
6145 if (this.checkBufferTimeout_) {
6146 _globalWindow2['default'].clearTimeout(this.checkBufferTimeout_);
6147
6148 this.checkBufferTimeout_ = null;
6149 }
6150 }
6151
6152 /**
6153 * Returns whether the segment loader is fetching additional
6154 * segments when given the opportunity. This property can be
6155 * modified through calls to pause() and load().
6156 */
6157 }, {
6158 key: 'paused',
6159 value: function paused() {
6160 return this.checkBufferTimeout_ === null;
6161 }
6162
6163 /**
6164 * create/set the following mimetype on the SourceBuffer through a
6165 * SourceUpdater
6166 *
6167 * @param {String} mimeType the mime type string to use
6168 */
6169 }, {
6170 key: 'mimeType',
6171 value: function mimeType(_mimeType) {
6172 if (this.mimeType_) {
6173 return;
6174 }
6175
6176 this.mimeType_ = _mimeType;
6177 // if we were unpaused but waiting for a sourceUpdater, start
6178 // buffering now
6179 if (this.state === 'INIT' && this.couldBeginLoading_()) {
6180 this.init_();
6181 }
6182 }
6183
6184 /**
6185 * Delete all the buffered data and reset the SegmentLoader
6186 */
6187 }, {
6188 key: 'resetEverything',
6189 value: function resetEverything() {
6190 this.ended_ = false;
6191 this.resetLoader();
6192 this.remove(0, this.duration_());
6193 this.trigger('reseteverything');
6194 }
6195
6196 /**
6197 * Force the SegmentLoader to resync and start loading around the currentTime instead
6198 * of starting at the end of the buffer
6199 *
6200 * Useful for fast quality changes
6201 */
6202 }, {
6203 key: 'resetLoader',
6204 value: function resetLoader() {
6205 this.fetchAtBuffer_ = false;
6206 this.resyncLoader();
6207 }
6208
6209 /**
6210 * Force the SegmentLoader to restart synchronization and make a conservative guess
6211 * before returning to the simple walk-forward method
6212 */
6213 }, {
6214 key: 'resyncLoader',
6215 value: function resyncLoader() {
6216 this.mediaIndex = null;
6217 this.syncPoint_ = null;
6218 this.abort();
6219 }
6220
6221 /**
6222 * Remove any data in the source buffer between start and end times
6223 * @param {Number} start - the start time of the region to remove from the buffer
6224 * @param {Number} end - the end time of the region to remove from the buffer
6225 */
6226 }, {
6227 key: 'remove',
6228 value: function remove(start, end) {
6229 if (this.sourceUpdater_) {
6230 this.sourceUpdater_.remove(start, end);
6231 }
6232 (0, _videojsContribMediaSourcesEs5RemoveCuesFromTrackJs2['default'])(start, end, this.segmentMetadataTrack_);
6233 }
6234
6235 /**
6236 * (re-)schedule monitorBufferTick_ to run as soon as possible
6237 *
6238 * @private
6239 */
6240 }, {
6241 key: 'monitorBuffer_',
6242 value: function monitorBuffer_() {
6243 if (this.checkBufferTimeout_) {
6244 _globalWindow2['default'].clearTimeout(this.checkBufferTimeout_);
6245 }
6246
6247 this.checkBufferTimeout_ = _globalWindow2['default'].setTimeout(this.monitorBufferTick_.bind(this), 1);
6248 }
6249
6250 /**
6251 * As long as the SegmentLoader is in the READY state, periodically
6252 * invoke fillBuffer_().
6253 *
6254 * @private
6255 */
6256 }, {
6257 key: 'monitorBufferTick_',
6258 value: function monitorBufferTick_() {
6259 if (this.state === 'READY') {
6260 this.fillBuffer_();
6261 }
6262
6263 if (this.checkBufferTimeout_) {
6264 _globalWindow2['default'].clearTimeout(this.checkBufferTimeout_);
6265 }
6266
6267 this.checkBufferTimeout_ = _globalWindow2['default'].setTimeout(this.monitorBufferTick_.bind(this), CHECK_BUFFER_DELAY);
6268 }
6269
6270 /**
6271 * fill the buffer with segments unless the sourceBuffers are
6272 * currently updating
6273 *
6274 * Note: this function should only ever be called by monitorBuffer_
6275 * and never directly
6276 *
6277 * @private
6278 */
6279 }, {
6280 key: 'fillBuffer_',
6281 value: function fillBuffer_() {
6282 if (this.sourceUpdater_.updating()) {
6283 return;
6284 }
6285
6286 if (!this.syncPoint_) {
6287 this.syncPoint_ = this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_());
6288 }
6289
6290 // see if we need to begin loading immediately
6291 var segmentInfo = this.checkBuffer_(this.buffered_(), this.playlist_, this.mediaIndex, this.hasPlayed_(), this.currentTime_(), this.syncPoint_);
6292
6293 if (!segmentInfo) {
6294 return;
6295 }
6296
6297 var isEndOfStream = detectEndOfStream(this.playlist_, this.mediaSource_, segmentInfo.mediaIndex);
6298
6299 if (isEndOfStream) {
6300 this.endOfStream();
6301 return;
6302 }
6303
6304 if (segmentInfo.mediaIndex === this.playlist_.segments.length - 1 && this.mediaSource_.readyState === 'ended' && !this.seeking_()) {
6305 return;
6306 }
6307
6308 // We will need to change timestampOffset of the sourceBuffer if either of
6309 // the following conditions are true:
6310 // - The segment.timeline !== this.currentTimeline
6311 // (we are crossing a discontinuity somehow)
6312 // - The "timestampOffset" for the start of this segment is less than
6313 // the currently set timestampOffset
6314 if (segmentInfo.timeline !== this.currentTimeline_ || segmentInfo.startOfSegment !== null && segmentInfo.startOfSegment < this.sourceUpdater_.timestampOffset()) {
6315 this.syncController_.reset();
6316 segmentInfo.timestampOffset = segmentInfo.startOfSegment;
6317 }
6318
6319 this.loadSegment_(segmentInfo);
6320 }
6321
6322 /**
6323 * Determines what segment request should be made, given current playback
6324 * state.
6325 *
6326 * @param {TimeRanges} buffered - the state of the buffer
6327 * @param {Object} playlist - the playlist object to fetch segments from
6328 * @param {Number} mediaIndex - the previous mediaIndex fetched or null
6329 * @param {Boolean} hasPlayed - a flag indicating whether we have played or not
6330 * @param {Number} currentTime - the playback position in seconds
6331 * @param {Object} syncPoint - a sync-point object mapping a display time to a segment index
6332 * @returns {Object} a segment request object that describes the segment to load
6333 */
6334 }, {
6335 key: 'checkBuffer_',
6336 value: function checkBuffer_(buffered, playlist, mediaIndex, hasPlayed, currentTime, syncPoint) {
6337 var lastBufferedEnd = 0;
6338 var startOfSegment = undefined;
6339
6340 if (buffered.length) {
6341 lastBufferedEnd = buffered.end(buffered.length - 1);
6342 }
6343
6344 var bufferedTime = Math.max(0, lastBufferedEnd - currentTime);
6345
6346 if (!playlist.segments.length) {
6347 return null;
6348 }
6349
6350 // if there is plenty of content buffered and the video has
6351 // been played before, relax for a while
6352 if (bufferedTime >= this.goalBufferLength_()) {
6353 return null;
6354 }
6355
6356 // if the video has not yet played once and we already have
6357 // one segment downloaded, do nothing
6358 if (!hasPlayed && bufferedTime >= 1) {
6359 return null;
6360 }
6361
6362 this.logger_('checkBuffer_', 'mediaIndex:', mediaIndex, 'hasPlayed:', hasPlayed, 'currentTime:', currentTime, 'syncPoint:', syncPoint, 'fetchAtBuffer:', this.fetchAtBuffer_, 'bufferedTime:', bufferedTime);
6363
6364 // When the syncPoint is null, there is no way of determining a good
6365 // conservative segment index to fetch from
6366 // The best thing to do here is to gather this kind of sync-point data by
6367 // making a request
6368 if (syncPoint === null) {
6369 mediaIndex = this.getSyncSegmentCandidate_(playlist);
6370 this.logger_('getSync', 'mediaIndex:', mediaIndex);
6371 return this.generateSegmentInfo_(playlist, mediaIndex, null, true);
6372 }
6373
6374 // Under normal playback conditions fetching is a simple walk forward
6375 if (mediaIndex !== null) {
6376 this.logger_('walkForward', 'mediaIndex:', mediaIndex + 1);
6377 var segment = playlist.segments[mediaIndex];
6378
6379 if (segment && segment.end) {
6380 startOfSegment = segment.end;
6381 } else {
6382 startOfSegment = lastBufferedEnd;
6383 }
6384 return this.generateSegmentInfo_(playlist, mediaIndex + 1, startOfSegment, false);
6385 }
6386
6387 // There is a sync-point but the lack of a mediaIndex indicates that
6388 // we need to make a good conservative guess about which segment to
6389 // fetch
6390 if (this.fetchAtBuffer_) {
6391 // Find the segment containing the end of the buffer
6392 var mediaSourceInfo = _playlist2['default'].getMediaInfoForTime(playlist, lastBufferedEnd, syncPoint.segmentIndex, syncPoint.time);
6393
6394 mediaIndex = mediaSourceInfo.mediaIndex;
6395 startOfSegment = mediaSourceInfo.startTime;
6396 } else {
6397 // Find the segment containing currentTime
6398 var mediaSourceInfo = _playlist2['default'].getMediaInfoForTime(playlist, currentTime, syncPoint.segmentIndex, syncPoint.time);
6399
6400 mediaIndex = mediaSourceInfo.mediaIndex;
6401 startOfSegment = mediaSourceInfo.startTime;
6402 }
6403 this.logger_('getMediaIndexForTime', 'mediaIndex:', mediaIndex, 'startOfSegment:', startOfSegment);
6404
6405 return this.generateSegmentInfo_(playlist, mediaIndex, startOfSegment, false);
6406 }
6407
6408 /**
6409 * The segment loader has no recourse except to fetch a segment in the
6410 * current playlist and use the internal timestamps in that segment to
6411 * generate a syncPoint. This function returns a good candidate index
6412 * for that process.
6413 *
6414 * @param {Object} playlist - the playlist object to look for a sync segment candidate in
6415 * @returns {Number} An index of a segment from the playlist to load
6416 */
6417 }, {
6418 key: 'getSyncSegmentCandidate_',
6419 value: function getSyncSegmentCandidate_(playlist) {
6420 var _this2 = this;
6421
6422 if (this.currentTimeline_ === -1) {
6423 return 0;
6424 }
6425
6426 var segmentIndexArray = playlist.segments.map(function (s, i) {
6427 return {
6428 timeline: s.timeline,
6429 segmentIndex: i
6430 };
6431 }).filter(function (s) {
6432 return s.timeline === _this2.currentTimeline_;
6433 });
6434
6435 if (segmentIndexArray.length) {
6436 return segmentIndexArray[Math.min(segmentIndexArray.length - 1, 1)].segmentIndex;
6437 }
6438
6439 return Math.max(playlist.segments.length - 1, 0);
6440 }
6441 }, {
6442 key: 'generateSegmentInfo_',
6443 value: function generateSegmentInfo_(playlist, mediaIndex, startOfSegment, isSyncRequest) {
6444 if (mediaIndex < 0 || mediaIndex >= playlist.segments.length) {
6445 return null;
6446 }
6447
6448 var segment = playlist.segments[mediaIndex];
6449
6450 return {
6451 requestId: 'segment-loader-' + Math.random(),
6452 // resolve the segment URL relative to the playlist
6453 uri: segment.resolvedUri,
6454 // the segment's mediaIndex at the time it was requested
6455 mediaIndex: mediaIndex,
6456 // whether or not to update the SegmentLoader's state with this
6457 // segment's mediaIndex
6458 isSyncRequest: isSyncRequest,
6459 startOfSegment: startOfSegment,
6460 // the segment's playlist
6461 playlist: playlist,
6462 // unencrypted bytes of the segment
6463 bytes: null,
6464 // when a key is defined for this segment, the encrypted bytes
6465 encryptedBytes: null,
6466 // The target timestampOffset for this segment when we append it
6467 // to the source buffer
6468 timestampOffset: null,
6469 // The timeline that the segment is in
6470 timeline: segment.timeline,
6471 // The expected duration of the segment in seconds
6472 duration: segment.duration,
6473 // retain the segment in case the playlist updates while doing an async process
6474 segment: segment
6475 };
6476 }
6477
6478 /**
6479 * Determines if the network has enough bandwidth to complete the current segment
6480 * request in a timely manner. If not, the request will be aborted early and bandwidth
6481 * updated to trigger a playlist switch.
6482 *
6483 * @param {Object} stats
6484 * Object containing stats about the request timing and size
6485 * @return {Boolean} True if the request was aborted, false otherwise
6486 * @private
6487 */
6488 }, {
6489 key: 'abortRequestEarly_',
6490 value: function abortRequestEarly_(stats) {
6491 if (this.hls_.tech_.paused() ||
6492 // Don't abort if the current playlist is on the lowestEnabledRendition
6493 // TODO: Replace using timeout with a boolean indicating whether this playlist is
6494 // the lowestEnabledRendition.
6495 !this.xhrOptions_.timeout ||
6496 // Don't abort if we have no bandwidth information to estimate segment sizes
6497 !this.playlist_.attributes.BANDWIDTH) {
6498 return false;
6499 }
6500
6501 // Wait at least 1 second after the first byte of data has been received before
6502 // using the calculated bandwidth from the progress event, to allow the bitrate
6503 // to stabilize
6504 if (Date.now() - (stats.firstBytesReceivedAt || Date.now()) < 1000) {
6505 return false;
6506 }
6507
6508 var currentTime = this.currentTime_();
6509 var measuredBandwidth = stats.bandwidth;
6510 var segmentDuration = this.pendingSegment_.duration;
6511
6512 var requestTimeRemaining = _playlist2['default'].estimateSegmentRequestTime(segmentDuration, measuredBandwidth, this.playlist_, stats.bytesReceived);
6513
6514 // Subtract 1 from the timeUntilRebuffer so we still consider an early abort
6515 // if we are only left with less than 1 second when the request completes.
6516 // A negative timeUntilRebuffering indicates we are already rebuffering
6517 var timeUntilRebuffer = (0, _ranges.timeUntilRebuffer)(this.buffered_(), currentTime, this.hls_.tech_.playbackRate()) - 1;
6518
6519 // Only consider aborting early if the estimated time to finish the download
6520 // is larger than the estimated time until the player runs out of forward buffer
6521 if (requestTimeRemaining <= timeUntilRebuffer) {
6522 return false;
6523 }
6524
6525 var switchCandidate = (0, _playlistSelectors.minRebufferMaxBandwidthSelector)({
6526 master: this.hls_.playlists.master,
6527 currentTime: currentTime,
6528 bandwidth: measuredBandwidth,
6529 duration: this.duration_(),
6530 segmentDuration: segmentDuration,
6531 timeUntilRebuffer: timeUntilRebuffer,
6532 currentTimeline: this.currentTimeline_,
6533 syncController: this.syncController_
6534 });
6535
6536 if (!switchCandidate) {
6537 return false;
6538 }
6539
6540 var rebufferingImpact = requestTimeRemaining - timeUntilRebuffer;
6541
6542 var timeSavedBySwitching = rebufferingImpact - switchCandidate.rebufferingImpact;
6543
6544 var minimumTimeSaving = 0.5;
6545
6546 // If we are already rebuffering, increase the amount of variance we add to the
6547 // potential round trip time of the new request so that we are not too aggressive
6548 // with switching to a playlist that might save us a fraction of a second.
6549 if (timeUntilRebuffer <= _ranges.TIME_FUDGE_FACTOR) {
6550 minimumTimeSaving = 1;
6551 }
6552
6553 if (!switchCandidate.playlist || switchCandidate.playlist.uri === this.playlist_.uri || timeSavedBySwitching < minimumTimeSaving) {
6554 return false;
6555 }
6556
6557 // set the bandwidth to that of the desired playlist, being sure to scale by
6558 // BANDWIDTH_VARIANCE and add one so the playlist selector does not exclude it;
6559 // don't trigger a bandwidthupdate as the bandwidth is artificial
6560 this.bandwidth = switchCandidate.playlist.attributes.BANDWIDTH * _config2['default'].BANDWIDTH_VARIANCE + 1;
6561 this.abort();
6562 this.trigger('earlyabort');
6563 return true;
6564 }
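      // A worked example of the early-abort decision above (illustrative numbers):
      //
      //   // requestTimeRemaining = 8 (estimated seconds left on this download)
      //   // timeUntilRebuffer = 4 (forward buffer in seconds, minus the 1s margin)
      //   // 8 > 4, so a switch candidate is considered:
      //   //   rebufferingImpact = 8 - 4 = 3 + 1 = 4
      //   //   switchCandidate.rebufferingImpact = 1
      //   //   timeSavedBySwitching = 4 - 1 = 3 >= minimumTimeSaving (0.5)
      //   // so the request is aborted and 'earlyabort' is triggered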
6565
6566 /**
6567 * XHR `progress` event handler
6568 *
6569 * @param {Event} event
6570 * The XHR `progress` event
6571 * @param {Object} simpleSegment
6572 * A simplified segment object copy
6573 * @private
6574 */
6575 }, {
6576 key: 'handleProgress_',
6577 value: function handleProgress_(event, simpleSegment) {
6578 if (!this.pendingSegment_ || simpleSegment.requestId !== this.pendingSegment_.requestId || this.abortRequestEarly_(simpleSegment.stats)) {
6579 return;
6580 }
6581
6582 this.trigger('progress');
6583 }
6584
6585 /**
6586 * load a specific segment from a request into the buffer
6587 *
6588 * @private
6589 */
6590 }, {
6591 key: 'loadSegment_',
6592 value: function loadSegment_(segmentInfo) {
6593 this.state = 'WAITING';
6594 this.pendingSegment_ = segmentInfo;
6595 this.trimBackBuffer_(segmentInfo);
6596
6597 segmentInfo.abortRequests = (0, _mediaSegmentRequest.mediaSegmentRequest)(this.hls_.xhr, this.xhrOptions_, this.decrypter_, this.createSimplifiedSegmentObj_(segmentInfo),
6598 // progress callback
6599 this.handleProgress_.bind(this), this.segmentRequestFinished_.bind(this));
6600 }
6601
6602 /**
6603 * trim the back buffer so that we don't have too much data
6604 * in the source buffer
6605 *
6606 * @private
6607 *
6608 * @param {Object} segmentInfo - the current segment
6609 */
6610 }, {
6611 key: 'trimBackBuffer_',
6612 value: function trimBackBuffer_(segmentInfo) {
6613 var removeToTime = safeBackBufferTrimTime(this.seekable_(), this.currentTime_(), this.playlist_.targetDuration || 10);
6614
6615 // Chrome has a hard limit of 150MB of
6616 // buffer and a very conservative "garbage collector"
6617 // We manually clear out the old buffer to ensure
6618 // we don't trigger the QuotaExceeded error
6619 // on the source buffer during subsequent appends
6620
6621 if (removeToTime > 0) {
6622 this.remove(0, removeToTime);
6623 }
6624 }
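      // A sketch of the resulting trim (hypothetical values):
      //
      //   // seekable: [0, 120], currentTime: 120, targetDuration: 10
      //   // safeBackBufferTrimTime(...) returns a point safely behind the
      //   // playhead, e.g. 90, and this.remove(0, 90) evicts the back buffer
      //   // before a later append can hit the QuotaExceeded error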
6625
6626 /**
6627 * creates a simplified copy of the segment object with just the
6628 * information necessary to perform the XHR and decryption
6629 *
6630 * @private
6631 *
6632 * @param {Object} segmentInfo - the current segment
6633 * @returns {Object} a simplified segment object copy
6634 */
6635 }, {
6636 key: 'createSimplifiedSegmentObj_',
6637 value: function createSimplifiedSegmentObj_(segmentInfo) {
6638 var segment = segmentInfo.segment;
6639 var simpleSegment = {
6640 resolvedUri: segment.resolvedUri,
6641 byterange: segment.byterange,
6642 requestId: segmentInfo.requestId
6643 };
6644
6645 if (segment.key) {
6646 // if the media sequence is greater than 2^32, the IV will be incorrect
6647 // assuming 10s segments, that would be about 1300 years
6648 var iv = segment.key.iv || new Uint32Array([0, 0, 0, segmentInfo.mediaIndex + segmentInfo.playlist.mediaSequence]);
6649
6650 simpleSegment.key = {
6651 resolvedUri: segment.key.resolvedUri,
6652 iv: iv
6653 };
6654 }
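      // For example, a segment with a key but no explicit IV, mediaIndex 3, and
      // playlist.mediaSequence 100 gets the HLS default per-segment IV
      // new Uint32Array([0, 0, 0, 103]), i.e. the media sequence number of the
      // segment, for AES-128-CBC decryption.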
6655
6656 if (segment.map) {
6657 simpleSegment.map = this.initSegment(segment.map);
6658 }
6659
6660 return simpleSegment;
6661 }
6662
6663 /**
6664 * Handle the callback from the segmentRequest function and set the
6665 * associated SegmentLoader state and errors if necessary
6666 *
6667 * @private
6668 */
6669 }, {
6670 key: 'segmentRequestFinished_',
6671 value: function segmentRequestFinished_(error, simpleSegment) {
6672 // every request counts as a media request even if it has been aborted
6673 // or canceled due to a timeout
6674 this.mediaRequests += 1;
6675
6676 if (simpleSegment.stats) {
6677 this.mediaBytesTransferred += simpleSegment.stats.bytesReceived;
6678 this.mediaTransferDuration += simpleSegment.stats.roundTripTime;
6679 }
6680
6681 // The request was aborted and the SegmentLoader has already been reset
6682 if (!this.pendingSegment_) {
6683 this.mediaRequestsAborted += 1;
6684 return;
6685 }
6686
6687 // the request was aborted and the SegmentLoader has already started
6688 // another request. this can happen when the timeout for an aborted
6689 // request triggers due to a limitation in the XHR library
6690 // do not count this as any sort of request or we risk double-counting
6691 if (simpleSegment.requestId !== this.pendingSegment_.requestId) {
6692 return;
6693 }
6694
6695 // an error occurred from the active pendingSegment_ so reset everything
6696 if (error) {
6697 this.pendingSegment_ = null;
6698 this.state = 'READY';
6699
6700 // the requests were aborted; just record the aborted stat and exit.
6701 // this is not a true error condition and nothing corrective needs
6702 // to be done
6703 if (error.code === _mediaSegmentRequest.REQUEST_ERRORS.ABORTED) {
6704 this.mediaRequestsAborted += 1;
6705 return;
6706 }
6707
6708 this.pause();
6709
6710 // the error is really just that at least one of the requests timed-out
6711 // set the bandwidth to a very low value and trigger an ABR switch to
6712 // take emergency action
6713 if (error.code === _mediaSegmentRequest.REQUEST_ERRORS.TIMEOUT) {
6714 this.mediaRequestsTimedout += 1;
6715 this.bandwidth = 1;
6716 this.roundTrip = NaN;
6717 this.trigger('bandwidthupdate');
6718 return;
6719 }
6720
6721 // if control-flow has arrived here, then the error is real
6722 // emit an error event to blacklist the current playlist
6723 this.mediaRequestsErrored += 1;
6724 this.error(error);
6725 this.trigger('error');
6726 return;
6727 }
6728
6729 // the response was a success so set any bandwidth stats the request
6730 // generated for ABR purposes
6731 this.bandwidth = simpleSegment.stats.bandwidth;
6732 this.roundTrip = simpleSegment.stats.roundTripTime;
6733
6734 // if this request included an initialization segment, save that data
6735 // to the initSegment cache
6736 if (simpleSegment.map) {
6737 simpleSegment.map = this.initSegment(simpleSegment.map, true);
6738 }
6739
6740 this.processSegmentResponse_(simpleSegment);
6741 }
6742
6743 /**
6744 * Move any important data from the simplified segment object
6745 * back to the real segment object for future phases
6746 *
6747 * @private
6748 */
6749 }, {
6750 key: 'processSegmentResponse_',
6751 value: function processSegmentResponse_(simpleSegment) {
6752 var segmentInfo = this.pendingSegment_;
6753
6754 segmentInfo.bytes = simpleSegment.bytes;
6755 if (simpleSegment.map) {
6756 segmentInfo.segment.map.bytes = simpleSegment.map.bytes;
6757 }
6758
6759 segmentInfo.endOfAllRequests = simpleSegment.endOfAllRequests;
6760 this.handleSegment_();
6761 }
6762
6763 /**
6764 * append a decrypted segment to the SourceBuffer through a SourceUpdater
6765 *
6766 * @private
6767 */
6768 }, {
6769 key: 'handleSegment_',
6770 value: function handleSegment_() {
6771 var _this3 = this;
6772
6773 if (!this.pendingSegment_) {
6774 this.state = 'READY';
6775 return;
6776 }
6777
6778 var segmentInfo = this.pendingSegment_;
6779 var segment = segmentInfo.segment;
6780 var timingInfo = this.syncController_.probeSegmentInfo(segmentInfo);
6781
6782 // When we have our first timing info, determine what media types this loader is
6783 // dealing with. Although we're maintaining extra state, it helps to preserve the
6784 // separation of segment loader from the actual source buffers.
6785 if (typeof this.startingMedia_ === 'undefined' && timingInfo && (
6786 // Guard against cases where we're not getting timing info at all until we are
6787 // certain that all streams will provide it.
6788 timingInfo.containsAudio || timingInfo.containsVideo)) {
6789 this.startingMedia_ = {
6790 containsAudio: timingInfo.containsAudio,
6791 containsVideo: timingInfo.containsVideo
6792 };
6793 }
6794
6795 var illegalMediaSwitchError = illegalMediaSwitch(this.loaderType_, this.startingMedia_, timingInfo);
6796
6797 if (illegalMediaSwitchError) {
6798 this.error({
6799 message: illegalMediaSwitchError,
6800 blacklistDuration: Infinity
6801 });
6802 this.trigger('error');
6803 return;
6804 }
6805
6806 if (segmentInfo.isSyncRequest) {
6807 this.trigger('syncinfoupdate');
6808 this.pendingSegment_ = null;
6809 this.state = 'READY';
6810 return;
6811 }
6812
6813 if (segmentInfo.timestampOffset !== null && segmentInfo.timestampOffset !== this.sourceUpdater_.timestampOffset()) {
6814 this.sourceUpdater_.timestampOffset(segmentInfo.timestampOffset);
6815 // fired when a timestamp offset is set in HLS (can also identify discontinuities)
6816 this.trigger('timestampoffset');
6817 }
6818
6819 var timelineMapping = this.syncController_.mappingForTimeline(segmentInfo.timeline);
6820
6821 if (timelineMapping !== null) {
6822 this.trigger({
6823 type: 'segmenttimemapping',
6824 mapping: timelineMapping
6825 });
6826 }
6827
6828 this.state = 'APPENDING';
6829
6830 // if the media initialization segment is changing, append it
6831 // before the content segment
6832 if (segment.map) {
6833 (function () {
6834 var initId = (0, _binUtils.initSegmentId)(segment.map);
6835
6836 if (!_this3.activeInitSegmentId_ || _this3.activeInitSegmentId_ !== initId) {
6837 var initSegment = _this3.initSegment(segment.map);
6838
6839 _this3.sourceUpdater_.appendBuffer(initSegment.bytes, function () {
6840 _this3.activeInitSegmentId_ = initId;
6841 });
6842 }
6843 })();
6844 }
6845
6846 segmentInfo.byteLength = segmentInfo.bytes.byteLength;
6847 if (typeof segment.start === 'number' && typeof segment.end === 'number') {
6848 this.mediaSecondsLoaded += segment.end - segment.start;
6849 } else {
6850 this.mediaSecondsLoaded += segment.duration;
6851 }
6852
6853 this.sourceUpdater_.appendBuffer(segmentInfo.bytes, this.handleUpdateEnd_.bind(this));
6854 }
6855
6856 /**
6857 * callback to run when appendBuffer is finished. detects if we are
6858 * in a good state to do things with the data we got, or if we need
6859 * to wait for more
6860 *
6861 * @private
6862 */
6863 }, {
6864 key: 'handleUpdateEnd_',
6865 value: function handleUpdateEnd_() {
6866 this.logger_('handleUpdateEnd_', 'segmentInfo:', this.pendingSegment_);
6867
6868 if (!this.pendingSegment_) {
6869 this.state = 'READY';
6870 if (!this.paused()) {
6871 this.monitorBuffer_();
6872 }
6873 return;
6874 }
6875
6876 var segmentInfo = this.pendingSegment_;
6877 var segment = segmentInfo.segment;
6878 var isWalkingForward = this.mediaIndex !== null;
6879
6880 this.pendingSegment_ = null;
6881 this.recordThroughput_(segmentInfo);
6882 this.addSegmentMetadataCue_(segmentInfo);
6883
6884 this.state = 'READY';
6885
6886 this.mediaIndex = segmentInfo.mediaIndex;
6887 this.fetchAtBuffer_ = true;
6888 this.currentTimeline_ = segmentInfo.timeline;
6889
6890 // We must update the syncinfo to recalculate the seekable range before
6891 // the following conditional otherwise it may consider this a bad "guess"
6892 // and attempt to resync when the post-update seekable window and live
6893 // point would mean that this was the perfect segment to fetch
6894 this.trigger('syncinfoupdate');
6895
6896 // If we previously appended a segment that ends more than 3 targetDurations before
6897 // the currentTime_ that means that our conservative guess was too conservative.
6898 // In that case, reset the loader state so that we try to use any information gained
6899 // from the previous request to create a new, more accurate, sync-point.
6900 if (segment.end && this.currentTime_() - segment.end > segmentInfo.playlist.targetDuration * 3) {
6901 this.resetEverything();
6902 return;
6903 }
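      // For example (illustrative numbers): if the appended segment ends at 60
      // but currentTime_() is 100 with a targetDuration of 10, then
      // 100 - 60 = 40 > 30, so the guess was too conservative and the loader
      // resets to build a more accurate sync-point.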
6904
6905 // Don't do a rendition switch unless we have enough time to get a sync segment
6906 // and conservatively guess
6907 if (isWalkingForward) {
6908 this.trigger('bandwidthupdate');
6909 }
6910 this.trigger('progress');
6911
6912 // any time an update finishes and the last segment is in the
6913 // buffer, end the stream. this ensures the "ended" event will
6914 // fire if playback reaches that point.
6915 var isEndOfStream = detectEndOfStream(segmentInfo.playlist, this.mediaSource_, segmentInfo.mediaIndex + 1);
6916
6917 if (isEndOfStream) {
6918 this.endOfStream();
6919 }
6920
6921 if (!this.paused()) {
6922 this.monitorBuffer_();
6923 }
6924 }
6925
6926 /**
6927 * Records the current throughput of the decrypt, transmux, and append
6928 * portion of the segment pipeline. `throughput.rate` is the cumulative
6929 * moving average of the throughput. `throughput.count` is the number of
6930 * data points in the average.
6931 *
6932 * @private
6933 * @param {Object} segmentInfo the object returned by loadSegment
6934 */
6935 }, {
6936 key: 'recordThroughput_',
6937 value: function recordThroughput_(segmentInfo) {
6938 var rate = this.throughput.rate;
6939 // Add one to the time to ensure that we don't accidentally attempt to divide
6940 // by zero in the case where the throughput is ridiculously high
6941 var segmentProcessingTime = Date.now() - segmentInfo.endOfAllRequests + 1;
6942 // Multiply by 8000 to convert from bytes/millisecond to bits/second
6943 var segmentProcessingThroughput = Math.floor(segmentInfo.byteLength / segmentProcessingTime * 8 * 1000);
6944
6945 // This is just a cumulative moving average calculation:
6946 // newAvg = oldAvg + (sample - oldAvg) / (sampleCount + 1)
6947 this.throughput.rate += (segmentProcessingThroughput - rate) / ++this.throughput.count;
6948 }
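      // A worked example of the cumulative moving average above
      // (illustrative numbers):
      //
      //   // oldAvg = 4e6 bits/s over count = 3 samples; a new sample of 6e6 gives
      //   // newAvg = 4e6 + (6e6 - 4e6) / (3 + 1) = 4.5e6 bits/s, with count = 4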
6949
6950 /**
6951 * A no-op debugging logger that is set to console.log only if debugging
6952 * is enabled globally
6953 *
6954 * @private
6955 */
6956 }, {
6957 key: 'logger_',
6958 value: function logger_() {}
6959
6960 /**
6961 * Adds a cue to the segment-metadata track with some metadata information about the
6962 * segment
6963 *
6964 * @private
6965 * @param {Object} segmentInfo
6966 * the object returned by loadSegment
6967 * @method addSegmentMetadataCue_
6968 */
6969 }, {
6970 key: 'addSegmentMetadataCue_',
6971 value: function addSegmentMetadataCue_(segmentInfo) {
6972 if (!this.segmentMetadataTrack_) {
6973 return;
6974 }
6975
6976 var segment = segmentInfo.segment;
6977 var start = segment.start;
6978 var end = segment.end;
6979
6980 // Do not try adding the cue if the start and end times are invalid.
6981 if (!finite(start) || !finite(end)) {
6982 return;
6983 }
6984
6985 (0, _videojsContribMediaSourcesEs5RemoveCuesFromTrackJs2['default'])(start, end, this.segmentMetadataTrack_);
6986
6987 var Cue = _globalWindow2['default'].WebKitDataCue || _globalWindow2['default'].VTTCue;
6988 var value = {
6989 uri: segmentInfo.uri,
6990 timeline: segmentInfo.timeline,
6991 playlist: segmentInfo.playlist.uri,
6992 start: start,
6993 end: end
6994 };
6995 var data = JSON.stringify(value);
6996 var cue = new Cue(start, end, data);
6997
6998 // Attach the metadata to the value property of the cue to keep consistency
6999 // between WebKitDataCue in Safari and VTTCue in other browsers
7000 cue.value = value;
7001
7002 this.segmentMetadataTrack_.addCue(cue);
7003 }
7004 }]);
7005
7006 return SegmentLoader;
7007})(_videoJs2['default'].EventTarget);
7008
7009exports['default'] = SegmentLoader;
7010}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
7011},{"./bin-utils":2,"./config":3,"./media-segment-request":7,"./playlist":11,"./playlist-selectors":10,"./ranges":12,"./source-updater":17,"global/window":32,"videojs-contrib-media-sources/es5/remove-cues-from-track.js":72}],17:[function(require,module,exports){
7012(function (global){
7013/**
7014 * @file source-updater.js
7015 */
7016'use strict';
7017
7018Object.defineProperty(exports, '__esModule', {
7019 value: true
7020});
7021
7022var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
7023
7024function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
7025
7026function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
7027
7028var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
7029
7030var _videoJs2 = _interopRequireDefault(_videoJs);
7031
7032var noop = function noop() {};
7033
7034/**
7035 * A queue of callbacks to be serialized and applied when a
7036 * MediaSource and its associated SourceBuffers are not in the
7037 * updating state. It is used by the segment loader to update the
7038 * underlying SourceBuffers when new data is loaded, for instance.
7039 *
7040 * @class SourceUpdater
7041 * @param {MediaSource} mediaSource the MediaSource to create the
7042 * SourceBuffer from
7043 * @param {String} mimeType the desired MIME type of the underlying
7044 * SourceBuffer
7045 */
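/**
 * A minimal usage sketch (hypothetical `mediaSource` and `segmentBytes`):
 *
 *   var updater = new SourceUpdater(mediaSource, 'video/mp2t');
 *
 *   updater.appendBuffer(segmentBytes, function () {
 *     // runs after the SourceBuffer fires `updateend` for this append
 *   });
 */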
7046
7047var SourceUpdater = (function () {
7048 function SourceUpdater(mediaSource, mimeType) {
7049 var _this = this;
7050
7051 _classCallCheck(this, SourceUpdater);
7052
7053 var createSourceBuffer = function createSourceBuffer() {
7054 _this.sourceBuffer_ = mediaSource.addSourceBuffer(mimeType);
7055
7056 // run completion handlers and process callbacks as updateend
7057 // events fire
7058 _this.onUpdateendCallback_ = function () {
7059 var pendingCallback = _this.pendingCallback_;
7060
7061 _this.pendingCallback_ = null;
7062
7063 if (pendingCallback) {
7064 pendingCallback();
7065 }
7066
7067 _this.runCallback_();
7068 };
7069
7070 _this.sourceBuffer_.addEventListener('updateend', _this.onUpdateendCallback_);
7071
7072 _this.runCallback_();
7073 };
7074
7075 this.callbacks_ = [];
7076 this.pendingCallback_ = null;
7077 this.timestampOffset_ = 0;
7078 this.mediaSource = mediaSource;
7079 this.processedAppend_ = false;
7080
7081 if (mediaSource.readyState === 'closed') {
7082 mediaSource.addEventListener('sourceopen', createSourceBuffer);
7083 } else {
7084 createSourceBuffer();
7085 }
7086 }
7087
7088 /**
7089 * Aborts the current segment and resets the segment parser.
7090 *
7091 * @param {Function} done function to call when done
7092 * @see http://w3c.github.io/media-source/#widl-SourceBuffer-abort-void
7093 */
7094
7095 _createClass(SourceUpdater, [{
7096 key: 'abort',
7097 value: function abort(done) {
7098 var _this2 = this;
7099
7100 if (this.processedAppend_) {
7101 this.queueCallback_(function () {
7102 _this2.sourceBuffer_.abort();
7103 }, done);
7104 }
7105 }
7106
7107 /**
7108 * Queue an update to append an ArrayBuffer.
7109 *
7110 * @param {ArrayBuffer} bytes
7111 * @param {Function} done the function to call when done
7112 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-appendBuffer-void-ArrayBuffer-data
7113 */
7114 }, {
7115 key: 'appendBuffer',
7116 value: function appendBuffer(bytes, done) {
7117 var _this3 = this;
7118
7119 this.processedAppend_ = true;
7120 this.queueCallback_(function () {
7121 _this3.sourceBuffer_.appendBuffer(bytes);
7122 }, done);
7123 }
7124
7125 /**
7126 * Indicates what TimeRanges are buffered in the managed SourceBuffer.
7127 *
7128 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-buffered
7129 */
7130 }, {
7131 key: 'buffered',
7132 value: function buffered() {
7133 if (!this.sourceBuffer_) {
7134 return _videoJs2['default'].createTimeRanges();
7135 }
7136 return this.sourceBuffer_.buffered;
7137 }
7138
7139 /**
7140 * Queue an update to remove a time range from the buffer.
7141 *
7142 * @param {Number} start where to start the removal
7143 * @param {Number} end where to end the removal
7144 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
7145 */
7146 }, {
7147 key: 'remove',
7148 value: function remove(start, end) {
7149 var _this4 = this;
7150
7151 if (this.processedAppend_) {
7152 this.queueCallback_(function () {
7153 _this4.sourceBuffer_.remove(start, end);
7154 }, noop);
7155 }
7156 }
7157
7158 /**
7159 * Whether the underlying sourceBuffer is updating or not
7160 *
7161 * @return {Boolean} the updating status of the SourceBuffer
7162 */
7163 }, {
7164 key: 'updating',
7165 value: function updating() {
7166 return !this.sourceBuffer_ || this.sourceBuffer_.updating || !!this.pendingCallback_;
7167 }
7168
7169 /**
7170 * Set/get the timestampoffset on the SourceBuffer
7171 *
7172 * @return {Number} the timestamp offset
7173 */
7174 }, {
7175 key: 'timestampOffset',
7176 value: function timestampOffset(offset) {
7177 var _this5 = this;
7178
7179 if (typeof offset !== 'undefined') {
7180 this.queueCallback_(function () {
7181 _this5.sourceBuffer_.timestampOffset = offset;
7182 });
7183 this.timestampOffset_ = offset;
7184 }
7185 return this.timestampOffset_;
7186 }
7187
7188 /**
7189 * Queue a callback to run
7190 */
7191 }, {
7192 key: 'queueCallback_',
7193 value: function queueCallback_(callback, done) {
7194 this.callbacks_.push([callback.bind(this), done]);
7195 this.runCallback_();
7196 }
7197
7198 /**
7199 * Run a queued callback
7200 */
7201 }, {
7202 key: 'runCallback_',
7203 value: function runCallback_() {
7204 var callbacks = undefined;
7205
7206 if (!this.updating() && this.callbacks_.length) {
7207 callbacks = this.callbacks_.shift();
7208 this.pendingCallback_ = callbacks[1];
7209 callbacks[0]();
7210 }
7211 }
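      // A trace of the queue above (illustrative `appendFn`/`doneFn`):
      //
      //   updater.queueCallback_(appendFn, doneFn);
      //   // callbacks_ === [[appendFn, doneFn]]
      //   // runCallback_(): nothing is updating, so pendingCallback_ = doneFn
      //   // and appendFn() runs; the SourceBuffer's next 'updateend' invokes
      //   // doneFn and re-runs the queue for any remaining work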
7212
7213 /**
7214 * dispose of the source updater and the underlying sourceBuffer
7215 */
7216 }, {
7217 key: 'dispose',
7218 value: function dispose() {
7219 if (this.sourceBuffer_) {
7220 this.sourceBuffer_.removeEventListener('updateend', this.onUpdateendCallback_);
7221 if (this.mediaSource.readyState === 'open') { this.sourceBuffer_.abort(); }
7222 }
7223 }
7224 }]);
7225
7226 return SourceUpdater;
7227})();
7228
7229exports['default'] = SourceUpdater;
7230module.exports = exports['default'];
7231}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
7232},{}],18:[function(require,module,exports){
7233(function (global){
7234/**
7235 * @file sync-controller.js
7236 */
7237
7238'use strict';
7239
7240Object.defineProperty(exports, '__esModule', {
7241 value: true
7242});
7243
7244var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
7245
7246var _get = function get(_x2, _x3, _x4) { var _again = true; _function: while (_again) { var object = _x2, property = _x3, receiver = _x4; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x2 = parent; _x3 = property; _x4 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
7247
7248function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
7249
7250function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
7251
7252function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
7253
7254var _muxJsLibMp4Probe = require('mux.js/lib/mp4/probe');
7255
7256var _muxJsLibMp4Probe2 = _interopRequireDefault(_muxJsLibMp4Probe);
7257
7258var _muxJsLibToolsTsInspectorJs = require('mux.js/lib/tools/ts-inspector.js');
7259
7260var _playlist = require('./playlist');
7261
7262var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
7263
7264var _videoJs2 = _interopRequireDefault(_videoJs);
7265
7266var syncPointStrategies = [
7267 // Strategy "VOD": Handle the VOD-case where the sync-point is *always*
7268// the equivalence display-time 0 === segment-index 0
7269{
7270 name: 'VOD',
7271 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
7272 if (duration !== Infinity) {
7273 var syncPoint = {
7274 time: 0,
7275 segmentIndex: 0
7276 };
7277
7278 return syncPoint;
7279 }
7280 return null;
7281 }
7282},
7283 // Strategy "ProgramDateTime": We have a program-date-time tag in this playlist
7284{
7285 name: 'ProgramDateTime',
7286 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
7287 if (syncController.datetimeToDisplayTime && playlist.dateTimeObject) {
7288 var playlistTime = playlist.dateTimeObject.getTime() / 1000;
7289 var playlistStart = playlistTime + syncController.datetimeToDisplayTime;
7290 var syncPoint = {
7291 time: playlistStart,
7292 segmentIndex: 0
7293 };
7294
7295 return syncPoint;
7296 }
7297 return null;
7298 }
7299},
7300 // Strategy "Segment": We have a known time mapping for a timeline and a
7301// segment in the current timeline with timing data
7302{
7303 name: 'Segment',
7304 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
7305 var segments = playlist.segments || [];
7306 var syncPoint = null;
7307 var lastDistance = null;
7308
7309 currentTime = currentTime || 0;
7310
7311 for (var i = 0; i < segments.length; i++) {
7312 var segment = segments[i];
7313
7314 if (segment.timeline === currentTimeline && typeof segment.start !== 'undefined') {
7315 var distance = Math.abs(currentTime - segment.start);
7316
7317 // Once the distance begins to increase, we have passed
7318 // currentTime and can stop looking for better candidates
7319 if (lastDistance !== null && lastDistance < distance) {
7320 break;
7321 }
7322
7323 if (!syncPoint || lastDistance === null || lastDistance >= distance) {
7324 lastDistance = distance;
7325 syncPoint = {
7326 time: segment.start,
7327 segmentIndex: i
7328 };
7329 }
7330 }
7331 }
7332 return syncPoint;
7333 }
7334},
7335 // Strategy "Discontinuity": We have a discontinuity with a known
7336// display-time
7337{
7338 name: 'Discontinuity',
7339 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
7340 var syncPoint = null;
7341
7342 currentTime = currentTime || 0;
7343
7344 if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
7345 var lastDistance = null;
7346
7347 for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
7348 var segmentIndex = playlist.discontinuityStarts[i];
7349 var discontinuity = playlist.discontinuitySequence + i + 1;
7350 var discontinuitySync = syncController.discontinuities[discontinuity];
7351
7352 if (discontinuitySync) {
7353 var distance = Math.abs(currentTime - discontinuitySync.time);
7354
7355 // Once the distance begins to increase, we have passed
7356 // currentTime and can stop looking for better candidates
7357 if (lastDistance !== null && lastDistance < distance) {
7358 break;
7359 }
7360
7361 if (!syncPoint || lastDistance === null || lastDistance >= distance) {
7362 lastDistance = distance;
7363 syncPoint = {
7364 time: discontinuitySync.time,
7365 segmentIndex: segmentIndex
7366 };
7367 }
7368 }
7369 }
7370 }
7371 return syncPoint;
7372 }
7373},
7374 // Strategy "Playlist": We have a playlist with a known mapping of
7375// segment index to display time
7376{
7377 name: 'Playlist',
7378 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
7379 if (playlist.syncInfo) {
7380 var syncPoint = {
7381 time: playlist.syncInfo.time,
7382 segmentIndex: playlist.syncInfo.mediaSequence - playlist.mediaSequence
7383 };
7384
7385 return syncPoint;
7386 }
7387 return null;
7388 }
7389}];
7390
7391exports.syncPointStrategies = syncPointStrategies;
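// Each strategy's `run` returns either null or a sync-point of the shape
// { time: <display time in seconds>, segmentIndex: <index into playlist.segments> }.
// For example, the VOD strategy always yields { time: 0, segmentIndex: 0 }
// for finite-duration content.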
7392
7393var SyncController = (function (_videojs$EventTarget) {
7394 _inherits(SyncController, _videojs$EventTarget);
7395
7396 function SyncController() {
7397 var options = arguments.length <= 0 || arguments[0] === undefined ? {} : arguments[0];
7398
7399 _classCallCheck(this, SyncController);
7400
7401 _get(Object.getPrototypeOf(SyncController.prototype), 'constructor', this).call(this);
7402 // Segment Loader state variables...
7403 // ...for syncing across variants
7404 this.inspectCache_ = undefined;
7405
7406 // ...for syncing across variants
7407 this.timelines = [];
7408 this.discontinuities = [];
7409 this.datetimeToDisplayTime = null;
7410
7411 if (options.debug) {
7412 this.logger_ = _videoJs2['default'].log.bind(_videoJs2['default'], 'sync-controller ->');
7413 }
7414 }
7415
7416 /**
7417 * Find a sync-point for the playlist specified
7418 *
7419 * A sync-point is defined as a known mapping from display-time to
7420 * a segment-index in the current playlist.
7421 *
7422 * @param {Playlist} playlist
7423 * The playlist that needs a sync-point
7424 * @param {Number} duration
7425 * Duration of the MediaSource (Infinite if playing a live source)
7426 * @param {Number} currentTimeline
7427 * The last timeline from which a segment was loaded
7428 * @returns {Object}
7429 * A sync-point object
7430 */
7431
7432 _createClass(SyncController, [{
7433 key: 'getSyncPoint',
7434 value: function getSyncPoint(playlist, duration, currentTimeline, currentTime) {
7435 var syncPoints = this.runStrategies_(playlist, duration, currentTimeline, currentTime);
7436
7437 if (!syncPoints.length) {
7438 // Signal that we need to attempt to get a sync-point manually
7439 // by fetching a segment in the playlist and constructing
7440 // a sync-point from that information
7441 return null;
7442 }
7443
7444 // Now find the sync-point that is closest to the currentTime because
7445 // that should result in the most accurate guess about which segment
7446 // to fetch
7447 return this.selectSyncPoint_(syncPoints, { key: 'time', value: currentTime });
7448 }
7449
7450 /**
7451 * Calculate the amount of time that has expired off the playlist during playback
7452 *
7453 * @param {Playlist} playlist
7454 * Playlist object to calculate expired from
7455 * @param {Number} duration
7456 * Duration of the MediaSource (Infinity if playing a live source)
7457 * @returns {Number|null}
7458 * The amount of time that has expired off the playlist during playback. Null
7459 * if no sync-points for the playlist can be found.
7460 */
7461 }, {
7462 key: 'getExpiredTime',
7463 value: function getExpiredTime(playlist, duration) {
7464 if (!playlist || !playlist.segments) {
7465 return null;
7466 }
7467
7468 var syncPoints = this.runStrategies_(playlist, duration, playlist.discontinuitySequence, 0);
7469
7470 // Without sync-points, there is not enough information to determine the expired time
7471 if (!syncPoints.length) {
7472 return null;
7473 }
7474
7475 var syncPoint = this.selectSyncPoint_(syncPoints, {
7476 key: 'segmentIndex',
7477 value: 0
7478 });
7479
7480 // If the sync-point is beyond the start of the playlist, we want to subtract the
7481 // duration from index 0 to syncPoint.segmentIndex instead of adding.
7482 if (syncPoint.segmentIndex > 0) {
7483 syncPoint.time *= -1;
7484 }
7485
7486 return Math.abs(syncPoint.time + (0, _playlist.sumDurations)(playlist, syncPoint.segmentIndex, 0));
7487 }
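      // A worked example (illustrative numbers), assuming sumDurations(playlist, 2, 0)
      // sums the durations of segments 0 and 1:
      //
      //   // syncPoint = { segmentIndex: 2, time: 25 }; segments 0 and 1 last 10s each
      //   // segmentIndex > 0, so time becomes -25
      //   // expired = Math.abs(-25 + 20) = 5 seconds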
7488
7489 /**
7490 * Runs each sync-point strategy and returns a list of sync-points returned by the
7491 * strategies
7492 *
7493 * @private
7494 * @param {Playlist} playlist
7495 * The playlist that needs a sync-point
7496 * @param {Number} duration
7497 * Duration of the MediaSource (Infinity if playing a live source)
7498 * @param {Number} currentTimeline
7499 * The last timeline from which a segment was loaded
7500 * @returns {Array}
7501 * A list of sync-point objects
7502 */
7503 }, {
7504 key: 'runStrategies_',
7505 value: function runStrategies_(playlist, duration, currentTimeline, currentTime) {
7506 var syncPoints = [];
7507
7508 // Try to find a sync-point by utilizing various strategies...
7509 for (var i = 0; i < syncPointStrategies.length; i++) {
7510 var strategy = syncPointStrategies[i];
7511 var syncPoint = strategy.run(this, playlist, duration, currentTimeline, currentTime);
7512
7513 if (syncPoint) {
7514 syncPoint.strategy = strategy.name;
7515 syncPoints.push({
7516 strategy: strategy.name,
7517 syncPoint: syncPoint
7518 });
7519 this.logger_('syncPoint found via <' + strategy.name + '>:', syncPoint);
7520 }
7521 }
7522
7523 return syncPoints;
7524 }
7525
7526 /**
7527 * Selects the sync-point nearest the specified target
7528 *
7529 * @private
7530 * @param {Array} syncPoints
7531 * List of sync-points to select from
7532 * @param {Object} target
7533 * Object specifying the property and value we are targeting
7534 * @param {String} target.key
7535 * Specifies the property to target. Must be either 'time' or 'segmentIndex'
7536 * @param {Number} target.value
7537 * The value to target for the specified key.
7538 * @returns {Object}
7539 * The sync-point nearest the target
7540 */
7541 }, {
7542 key: 'selectSyncPoint_',
7543 value: function selectSyncPoint_(syncPoints, target) {
7544 var bestSyncPoint = syncPoints[0].syncPoint;
7545 var bestDistance = Math.abs(syncPoints[0].syncPoint[target.key] - target.value);
7546 var bestStrategy = syncPoints[0].strategy;
7547
7548 for (var i = 1; i < syncPoints.length; i++) {
7549 var newDistance = Math.abs(syncPoints[i].syncPoint[target.key] - target.value);
7550
7551 if (newDistance < bestDistance) {
7552 bestDistance = newDistance;
7553 bestSyncPoint = syncPoints[i].syncPoint;
7554 bestStrategy = syncPoints[i].strategy;
7555 }
7556 }
7557
7558 this.logger_('syncPoint with strategy <' + bestStrategy + '> chosen: ', bestSyncPoint);
7559 return bestSyncPoint;
7560 }
7561
7562 /**
7563 * Save any meta-data present on the segments when segments leave
7564 * the live window to the playlist to allow for synchronization at the
7565 * playlist level later.
7566 *
7567 * @param {Playlist} oldPlaylist - The previous active playlist
7568 * @param {Playlist} newPlaylist - The updated and most current playlist
7569 */
7570 }, {
7571 key: 'saveExpiredSegmentInfo',
7572 value: function saveExpiredSegmentInfo(oldPlaylist, newPlaylist) {
7573 var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;
7574
7575 // When a segment expires from the playlist and it has a start time,
7576 // save that information as a possible sync-point reference for the future
7577 for (var i = mediaSequenceDiff - 1; i >= 0; i--) {
7578 var lastRemovedSegment = oldPlaylist.segments[i];
7579
7580 if (lastRemovedSegment && typeof lastRemovedSegment.start !== 'undefined') {
7581 newPlaylist.syncInfo = {
7582 mediaSequence: oldPlaylist.mediaSequence + i,
7583 time: lastRemovedSegment.start
7584 };
7585 this.logger_('playlist sync:', newPlaylist.syncInfo);
7586 this.trigger('syncinfoupdate');
7587 break;
7588 }
7589 }
7590 }
7591
7592 /**
7593 * Save the mapping from the playlist's ProgramDateTime to display time. This should
7594 * only ever happen once at the start of playback.
7595 *
7596 * @param {Playlist} playlist - The currently active playlist
7597 */
7598 }, {
7599 key: 'setDateTimeMapping',
7600 value: function setDateTimeMapping(playlist) {
7601 if (!this.datetimeToDisplayTime && playlist.dateTimeObject) {
7602 var playlistTimestamp = playlist.dateTimeObject.getTime() / 1000;
7603
7604 this.datetimeToDisplayTime = -playlistTimestamp;
7605 }
7606 }
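      // For example (illustrative timestamps): if the first playlist's
      // dateTimeObject is epoch second 1500000000, datetimeToDisplayTime becomes
      // -1500000000, so a later EXT-X-PROGRAM-DATE-TIME of 1500000030 maps to
      // display time 30 in the ProgramDateTime strategy above.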
7607
7608 /**
7609 * Reset the state of the inspection cache when we do a rendition
7610 * switch
7611 */
7612 }, {
7613 key: 'reset',
7614 value: function reset() {
7615 this.inspectCache_ = undefined;
7616 }
7617
7618 /**
7619 * Probe or inspect an fmp4 or an mpeg2-ts segment to determine the start
7620 * and end of the segment in its internal "media time". Used to generate
7621 * mappings from that internal "media time" to the display time that is
7622 * shown on the player.
7623 *
7624 * @param {SegmentInfo} segmentInfo - The current active request information
7625 */
7626 }, {
7627 key: 'probeSegmentInfo',
7628 value: function probeSegmentInfo(segmentInfo) {
7629 var segment = segmentInfo.segment;
7630 var playlist = segmentInfo.playlist;
7631 var timingInfo = undefined;
7632
7633 if (segment.map) {
7634 timingInfo = this.probeMp4Segment_(segmentInfo);
7635 } else {
7636 timingInfo = this.probeTsSegment_(segmentInfo);
7637 }
7638
7639 if (timingInfo) {
7640 if (this.calculateSegmentTimeMapping_(segmentInfo, timingInfo)) {
7641 this.saveDiscontinuitySyncInfo_(segmentInfo);
7642
7643 // If the playlist does not have sync information yet, record that information
7644 // now with segment timing information
7645 if (!playlist.syncInfo) {
7646 playlist.syncInfo = {
7647 mediaSequence: playlist.mediaSequence + segmentInfo.mediaIndex,
7648 time: segment.start
7649 };
7650 }
7651 }
7652 }
7653
7654 return timingInfo;
7655 }
7656
7657 /**
7658 * Probe an fmp4 segment to determine the start and end of the segment
7659 * in its internal "media time".
7660 *
7661 * @private
7662 * @param {SegmentInfo} segmentInfo - The current active request information
7663 * @return {object} The start and end time of the current segment in "media time"
7664 */
7665 }, {
7666 key: 'probeMp4Segment_',
7667 value: function probeMp4Segment_(segmentInfo) {
7668 var segment = segmentInfo.segment;
7669 var timescales = _muxJsLibMp4Probe2['default'].timescale(segment.map.bytes);
7670 var startTime = _muxJsLibMp4Probe2['default'].startTime(timescales, segmentInfo.bytes);
7671
7672 if (segmentInfo.timestampOffset !== null) {
7673 segmentInfo.timestampOffset -= startTime;
7674 }
7675
7676 return {
7677 start: startTime,
7678 end: startTime + segment.duration
7679 };
7680 }
7681
7682 /**
7683 * Probe an mpeg2-ts segment to determine the start and end of the segment
7684 * in its internal "media time".
7685 *
7686 * @private
7687 * @param {SegmentInfo} segmentInfo - The current active request information
7688 * @return {object} The start and end time of the current segment in "media time"
7689 */
7690 }, {
7691 key: 'probeTsSegment_',
7692 value: function probeTsSegment_(segmentInfo) {
7693 var timeInfo = (0, _muxJsLibToolsTsInspectorJs.inspect)(segmentInfo.bytes, this.inspectCache_);
7694 var segmentStartTime = undefined;
7695 var segmentEndTime = undefined;
7696
7697 if (!timeInfo) {
7698 return null;
7699 }
7700
7701 if (timeInfo.video && timeInfo.video.length === 2) {
7702 this.inspectCache_ = timeInfo.video[1].dts;
7703 segmentStartTime = timeInfo.video[0].dtsTime;
7704 segmentEndTime = timeInfo.video[1].dtsTime;
7705 } else if (timeInfo.audio && timeInfo.audio.length === 2) {
7706 this.inspectCache_ = timeInfo.audio[1].dts;
7707 segmentStartTime = timeInfo.audio[0].dtsTime;
7708 segmentEndTime = timeInfo.audio[1].dtsTime;
7709 }
7710
7711 return {
7712 start: segmentStartTime,
7713 end: segmentEndTime,
7714 containsVideo: timeInfo.video && timeInfo.video.length === 2,
7715 containsAudio: timeInfo.audio && timeInfo.audio.length === 2
7716 };
7717 }
7718 }, {
7719 key: 'timestampOffsetForTimeline',
7720 value: function timestampOffsetForTimeline(timeline) {
7721 if (typeof this.timelines[timeline] === 'undefined') {
7722 return null;
7723 }
7724 return this.timelines[timeline].time;
7725 }
7726 }, {
7727 key: 'mappingForTimeline',
7728 value: function mappingForTimeline(timeline) {
7729 if (typeof this.timelines[timeline] === 'undefined') {
7730 return null;
7731 }
7732 return this.timelines[timeline].mapping;
7733 }
7734
7735 /**
7736 * Use the "media time" for a segment to generate a mapping to "display time" and
7737 * save that display time to the segment.
7738 *
7739 * @private
7740 * @param {SegmentInfo} segmentInfo
7741 * The current active request information
7742 * @param {object} timingInfo
7743 * The start and end time of the current segment in "media time"
7744 * @returns {Boolean}
7745 * Returns false if segment time mapping could not be calculated
7746 */
7747 }, {
7748 key: 'calculateSegmentTimeMapping_',
7749 value: function calculateSegmentTimeMapping_(segmentInfo, timingInfo) {
7750 var segment = segmentInfo.segment;
7751 var mappingObj = this.timelines[segmentInfo.timeline];
7752
7753 if (segmentInfo.timestampOffset !== null) {
7754 this.logger_('tsO:', segmentInfo.timestampOffset);
7755
7756 mappingObj = {
7757 time: segmentInfo.startOfSegment,
7758 mapping: segmentInfo.startOfSegment - timingInfo.start
7759 };
7760 this.timelines[segmentInfo.timeline] = mappingObj;
7761 this.trigger('timestampoffset');
7762
7763 segment.start = segmentInfo.startOfSegment;
7764 segment.end = timingInfo.end + mappingObj.mapping;
7765 } else if (mappingObj) {
7766 segment.start = timingInfo.start + mappingObj.mapping;
7767 segment.end = timingInfo.end + mappingObj.mapping;
7768 } else {
7769 return false;
7770 }
7771
7772 return true;
7773 }
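      // A worked example of the mapping above (illustrative numbers):
      //
      //   // startOfSegment = 100 (display time), timingInfo.start = 10 (media time)
      //   // mapping = 100 - 10 = 90
      //   // a later segment probed at media time [16, 22] then maps to
      //   // display time [106, 112]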
7774
7775 /**
7776 * Each time we have a discontinuity in the playlist, attempt to calculate the location
7777 * in display time of the start of the discontinuity and save that. We also save an
7778 * accuracy value so that we save the values with the most accuracy (closest to 0)
7779 *
7780 * @private
7781 * @param {SegmentInfo} segmentInfo - The current active request information
7782 */
7783 }, {
7784 key: 'saveDiscontinuitySyncInfo_',
7785 value: function saveDiscontinuitySyncInfo_(segmentInfo) {
7786 var playlist = segmentInfo.playlist;
7787 var segment = segmentInfo.segment;
7788
7789 // If the current segment is a discontinuity then we know exactly where
7790 // the range starts and its accuracy is 0 (greater accuracy values
7791 // mean more approximation)
7792 if (segment.discontinuity) {
7793 this.discontinuities[segment.timeline] = {
7794 time: segment.start,
7795 accuracy: 0
7796 };
7797 } else if (playlist.discontinuityStarts.length) {
7798 // Search for future discontinuities that we can provide better timing
7799 // information for and save that information for sync purposes
7800 for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
7801 var segmentIndex = playlist.discontinuityStarts[i];
7802 var discontinuity = playlist.discontinuitySequence + i + 1;
7803 var mediaIndexDiff = segmentIndex - segmentInfo.mediaIndex;
7804 var accuracy = Math.abs(mediaIndexDiff);
7805
7806 if (!this.discontinuities[discontinuity] || this.discontinuities[discontinuity].accuracy > accuracy) {
7807 var time = undefined;
7808
7809 if (mediaIndexDiff < 0) {
7810 time = segment.start - (0, _playlist.sumDurations)(playlist, segmentInfo.mediaIndex, segmentIndex);
7811 } else {
7812 time = segment.end + (0, _playlist.sumDurations)(playlist, segmentInfo.mediaIndex + 1, segmentIndex);
7813 }
7814
7815 this.discontinuities[discontinuity] = {
7816 time: time,
7817 accuracy: accuracy
7818 };
7819 }
7820 }
7821 }
7822 }
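      // A worked example (illustrative numbers): with segmentInfo.mediaIndex 2,
      // segment.end 30, and a discontinuity starting at segment index 5,
      // mediaIndexDiff = 5 - 2 = 3 and accuracy = 3; the discontinuity's display
      // time is estimated as 30 + sumDurations(playlist, 3, 5), i.e. segment.end
      // plus the durations of segments 3 and 4.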
7823
7824 /**
7825 * A no-op debugging logger that is set to console.log only if debugging
7826 * is enabled globally
7827 *
7828 * @private
7829 */
7830 }, {
7831 key: 'logger_',
7832 value: function logger_() {}
7833 }]);
7834
7835 return SyncController;
7836})(_videoJs2['default'].EventTarget);
7837
7838exports['default'] = SyncController;
7839}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
7840},{"./playlist":11,"mux.js/lib/mp4/probe":57,"mux.js/lib/tools/ts-inspector.js":59}],19:[function(require,module,exports){
7841
7842/**
7843 * @file - codecs.js - Handles tasks regarding codec strings, such as translating
7844 * them to different formats, or parsing codec strings into objects that can be examined.
7845 */
7846
7847/**
7848 * Parses a codec string to retrieve the number of codecs specified,
7849 * the video codec and object type indicator, and the audio profile.
7850 */
7851
7852'use strict';
7853
7854Object.defineProperty(exports, '__esModule', {
7855 value: true
7856});
7857var parseCodecs = function parseCodecs() {
7858 var codecs = arguments.length <= 0 || arguments[0] === undefined ? '' : arguments[0];
7859
7860 var result = {
7861 codecCount: 0
7862 };
7863 var parsed = undefined;
7864
7865 result.codecCount = codecs.split(',').length;
7866 result.codecCount = result.codecCount || 2;
7867
7868 // parse the video codec
7869 parsed = /(^|\s|,)+(avc1)([^ ,]*)/i.exec(codecs);
7870 if (parsed) {
7871 result.videoCodec = parsed[2];
7872 result.videoObjectTypeIndicator = parsed[3];
7873 }
7874
7875 // parse the last field of the audio codec
7876 result.audioProfile = /(^|\s|,)+mp4a.[0-9A-Fa-f]+\.([0-9A-Fa-f]+)/i.exec(codecs);
7877 result.audioProfile = result.audioProfile && result.audioProfile[2];
7878
7879 return result;
7880};
7881exports.parseCodecs = parseCodecs;
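// For example:
//
//   parseCodecs('avc1.4d400d, mp4a.40.2');
//   // => {
//   //      codecCount: 2,
//   //      videoCodec: 'avc1',
//   //      videoObjectTypeIndicator: '.4d400d',
//   //      audioProfile: '2'
//   //    }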
7882},{}],20:[function(require,module,exports){
7883(function (global){
7884/**
7885 * @file vtt-segment-loader.js
7886 */
7887'use strict';
7888
7889Object.defineProperty(exports, '__esModule', {
7890 value: true
7891});
7892
7893var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
7894
7895var _get = function get(_x3, _x4, _x5) { var _again = true; _function: while (_again) { var object = _x3, property = _x4, receiver = _x5; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x3 = parent; _x4 = property; _x5 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
7896
7897function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
7898
7899function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
7900
7901function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
7902
7903var _segmentLoader = require('./segment-loader');
7904
7905var _segmentLoader2 = _interopRequireDefault(_segmentLoader);
7906
7907var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
7908
7909var _videoJs2 = _interopRequireDefault(_videoJs);
7910
7911var _globalWindow = require('global/window');
7912
7913var _globalWindow2 = _interopRequireDefault(_globalWindow);
7914
7915var _videojsContribMediaSourcesEs5RemoveCuesFromTrackJs = require('videojs-contrib-media-sources/es5/remove-cues-from-track.js');
7916
7917var _videojsContribMediaSourcesEs5RemoveCuesFromTrackJs2 = _interopRequireDefault(_videojsContribMediaSourcesEs5RemoveCuesFromTrackJs);
7918
7919var _binUtils = require('./bin-utils');
7920
7921var VTT_LINE_TERMINATORS = new Uint8Array('\n\n'.split('').map(function (char) {
7922 return char.charCodeAt(0);
7923}));
7924
7925var uintToString = function uintToString(uintArray) {
7926 return String.fromCharCode.apply(null, uintArray);
7927};
7928
7929/**
7930 * An object that manages segment loading and appending.
7931 *
7932 * @class VTTSegmentLoader
7933 * @param {Object} options required and optional options
7934 * @extends videojs.EventTarget
7935 */
7936
7937var VTTSegmentLoader = (function (_SegmentLoader) {
7938 _inherits(VTTSegmentLoader, _SegmentLoader);
7939
7940 function VTTSegmentLoader(settings) {
7941 var options = arguments.length <= 1 || arguments[1] === undefined ? {} : arguments[1];
7942
7943 _classCallCheck(this, VTTSegmentLoader);
7944
7945 _get(Object.getPrototypeOf(VTTSegmentLoader.prototype), 'constructor', this).call(this, settings, options);
7946
7947 // SegmentLoader requires a MediaSource be specified or it will throw an error;
7948 // however, VTTSegmentLoader has no need of a media source, so delete the reference
7949 this.mediaSource_ = null;
7950
7951 this.subtitlesTrack_ = null;
7952 }
7953
7954 /**
7955 * Indicates which time ranges are buffered
7956 *
7957 * @return {TimeRange}
7958 * TimeRange object representing the current buffered ranges
7959 */
7960
7961 _createClass(VTTSegmentLoader, [{
7962 key: 'buffered_',
7963 value: function buffered_() {
7964 if (!this.subtitlesTrack_ || !this.subtitlesTrack_.cues.length) {
7965 return _videoJs2['default'].createTimeRanges();
7966 }
7967
7968 var cues = this.subtitlesTrack_.cues;
7969 var start = cues[0].startTime;
7970 var end = cues[cues.length - 1].startTime;
7971
7972 return _videoJs2['default'].createTimeRanges([[start, end]]);
7973 }
7974
7975 /**
7976 * Gets and sets init segment for the provided map
7977 *
7978 * @param {Object} map
7979 * The map object representing the init segment to get or set
7980 * @param {Boolean=} set
7981 * If true, the init segment for the provided map should be saved
7982 * @return {Object}
7983 * map object for desired init segment
7984 */
7985 }, {
7986 key: 'initSegment',
7987 value: function initSegment(map) {
7988 var set = arguments.length <= 1 || arguments[1] === undefined ? false : arguments[1];
7989
7990 if (!map) {
7991 return null;
7992 }
7993
7994 var id = (0, _binUtils.initSegmentId)(map);
7995 var storedMap = this.initSegments_[id];
7996
7997 if (set && !storedMap && map.bytes) {
7998 // append WebVTT line terminators to the media initialization segment if it exists
7999 // to follow the WebVTT spec (https://w3c.github.io/webvtt/#file-structure) that
8000 // requires two or more WebVTT line terminators between the WebVTT header and the
8001 // rest of the file
8002 var combinedByteLength = VTT_LINE_TERMINATORS.byteLength + map.bytes.byteLength;
8003 var combinedSegment = new Uint8Array(combinedByteLength);
8004
8005 combinedSegment.set(map.bytes);
8006 combinedSegment.set(VTT_LINE_TERMINATORS, map.bytes.byteLength);
8007
8008 this.initSegments_[id] = storedMap = {
8009 resolvedUri: map.resolvedUri,
8010 byterange: map.byterange,
8011 bytes: combinedSegment
8012 };
8013 }
8014
8015 return storedMap || map;
8016 }
8017
8018 /**
8019 * Returns true if all configuration required for loading is present, otherwise false.
8020 *
8021 * @return {Boolean} True if the all configuration is ready for loading
8022 * @private
8023 */
8024 }, {
8025 key: 'couldBeginLoading_',
8026 value: function couldBeginLoading_() {
8027 return this.playlist_ && this.subtitlesTrack_ && !this.paused();
8028 }
8029
8030 /**
8031 * Once all the starting parameters have been specified, begin
8032 * operation. This method should only be invoked from the INIT
8033 * state.
8034 *
8035 * @private
8036 */
8037 }, {
8038 key: 'init_',
8039 value: function init_() {
8040 this.state = 'READY';
8041 this.resetEverything();
8042 return this.monitorBuffer_();
8043 }
8044
8045 /**
8046 * Set a subtitle track on the segment loader to add subtitles to
8047 *
8048 * @param {TextTrack=} track
8049 * The text track to add loaded subtitles to
8050 * @return {TextTrack}
8051 * Returns the subtitles track
8052 */
8053 }, {
8054 key: 'track',
8055 value: function track(_track) {
8056 if (typeof _track === 'undefined') {
8057 return this.subtitlesTrack_;
8058 }
8059
8060 this.subtitlesTrack_ = _track;
8061
8062      // if we were unpaused but waiting for a subtitles track, start
8063      // buffering now
8064 if (this.state === 'INIT' && this.couldBeginLoading_()) {
8065 this.init_();
8066 }
8067
8068 return this.subtitlesTrack_;
8069 }
8070
8071 /**
8072 * Remove any data in the source buffer between start and end times
8073 * @param {Number} start - the start time of the region to remove from the buffer
8074 * @param {Number} end - the end time of the region to remove from the buffer
8075 */
8076 }, {
8077 key: 'remove',
8078 value: function remove(start, end) {
8079 (0, _videojsContribMediaSourcesEs5RemoveCuesFromTrackJs2['default'])(start, end, this.subtitlesTrack_);
8080 }
8081
8082 /**
8083    * fill the buffer with segments unless the sourceBuffers are
8084 * currently updating
8085 *
8086 * Note: this function should only ever be called by monitorBuffer_
8087 * and never directly
8088 *
8089 * @private
8090 */
8091 }, {
8092 key: 'fillBuffer_',
8093 value: function fillBuffer_() {
8094 var _this = this;
8095
8096 if (!this.syncPoint_) {
8097 this.syncPoint_ = this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_());
8098 }
8099
8100 // see if we need to begin loading immediately
8101 var segmentInfo = this.checkBuffer_(this.buffered_(), this.playlist_, this.mediaIndex, this.hasPlayed_(), this.currentTime_(), this.syncPoint_);
8102
8103 segmentInfo = this.skipEmptySegments_(segmentInfo);
8104
8105 if (!segmentInfo) {
8106 return;
8107 }
8108
8109 if (this.syncController_.timestampOffsetForTimeline(segmentInfo.timeline) === null) {
8110 // We don't have the timestamp offset that we need to sync subtitles.
8111 // Rerun on a timestamp offset or user interaction.
8112 var checkTimestampOffset = function checkTimestampOffset() {
8113 _this.state = 'READY';
8114 if (!_this.paused()) {
8115 // if not paused, queue a buffer check as soon as possible
8116 _this.monitorBuffer_();
8117 }
8118 };
8119
8120 this.syncController_.one('timestampoffset', checkTimestampOffset);
8121 this.state = 'WAITING_ON_TIMELINE';
8122 return;
8123 }
8124
8125 this.loadSegment_(segmentInfo);
8126 }
8127
8128 /**
8129 * Prevents the segment loader from requesting segments we know contain no subtitles
8130    * by walking forward until we find the next segment that is not already known
8131    * to be empty.
8132 *
8133 * @param {Object} segmentInfo
8134 * a segment info object that describes the current segment
8135 * @return {Object}
8136 * a segment info object that describes the current segment
8137 */
8138 }, {
8139 key: 'skipEmptySegments_',
8140 value: function skipEmptySegments_(segmentInfo) {
8141 while (segmentInfo && segmentInfo.segment.empty) {
8142 segmentInfo = this.generateSegmentInfo_(segmentInfo.playlist, segmentInfo.mediaIndex + 1, segmentInfo.startOfSegment + segmentInfo.duration, segmentInfo.isSyncRequest);
8143 }
8144 return segmentInfo;
8145 }
8146
8147 /**
8148    * parse a decrypted segment and add the resulting cues to the subtitles track
8149 *
8150 * @private
8151 */
8152 }, {
8153 key: 'handleSegment_',
8154 value: function handleSegment_() {
8155 var _this2 = this;
8156
8157 if (!this.pendingSegment_ || !this.subtitlesTrack_) {
8158 this.state = 'READY';
8159 return;
8160 }
8161
8162 this.state = 'APPENDING';
8163
8164 var segmentInfo = this.pendingSegment_;
8165 var segment = segmentInfo.segment;
8166
8167      // Make sure that vtt.js has loaded; otherwise, wait until it has finished loading
8168 if (typeof _globalWindow2['default'].WebVTT !== 'function' && this.subtitlesTrack_ && this.subtitlesTrack_.tech_) {
8169 var _ret = (function () {
8170
8171 var loadHandler = function loadHandler() {
8172 _this2.handleSegment_();
8173 };
8174
8175 _this2.state = 'WAITING_ON_VTTJS';
8176 _this2.subtitlesTrack_.tech_.one('vttjsloaded', loadHandler);
8177 _this2.subtitlesTrack_.tech_.one('vttjserror', function () {
8178 _this2.subtitlesTrack_.tech_.off('vttjsloaded', loadHandler);
8179 _this2.error({
8180 message: 'Error loading vtt.js'
8181 });
8182 _this2.state = 'READY';
8183 _this2.pause();
8184 _this2.trigger('error');
8185 });
8186
8187 return {
8188 v: undefined
8189 };
8190 })();
8191
8192 if (typeof _ret === 'object') return _ret.v;
8193 }
8194
8195 segment.requested = true;
8196
8197 try {
8198 this.parseVTTCues_(segmentInfo);
8199 } catch (e) {
8200 this.error({
8201 message: e.message
8202 });
8203 this.state = 'READY';
8204 this.pause();
8205 return this.trigger('error');
8206 }
8207
8208 this.updateTimeMapping_(segmentInfo, this.syncController_.timelines[segmentInfo.timeline], this.playlist_);
8209
8210 if (segmentInfo.isSyncRequest) {
8211 this.trigger('syncinfoupdate');
8212 this.pendingSegment_ = null;
8213 this.state = 'READY';
8214 return;
8215 }
8216
8217 segmentInfo.byteLength = segmentInfo.bytes.byteLength;
8218
8219 this.mediaSecondsLoaded += segment.duration;
8220
8221 if (segmentInfo.cues.length) {
8222 // remove any overlapping cues to prevent doubling
8223 this.remove(segmentInfo.cues[0].endTime, segmentInfo.cues[segmentInfo.cues.length - 1].endTime);
8224 }
8225
8226 segmentInfo.cues.forEach(function (cue) {
8227 _this2.subtitlesTrack_.addCue(cue);
8228 });
8229
8230 this.handleUpdateEnd_();
8231 }
8232
8233 /**
8234 * Uses the WebVTT parser to parse the segment response
8235 *
8236 * @param {Object} segmentInfo
8237 * a segment info object that describes the current segment
8238 * @private
8239 */
8240 }, {
8241 key: 'parseVTTCues_',
8242 value: function parseVTTCues_(segmentInfo) {
8243 var decoder = undefined;
8244 var decodeBytesToString = false;
8245
8246 if (typeof _globalWindow2['default'].TextDecoder === 'function') {
8247 decoder = new _globalWindow2['default'].TextDecoder('utf8');
8248 } else {
8249 decoder = _globalWindow2['default'].WebVTT.StringDecoder();
8250 decodeBytesToString = true;
8251 }
8252
8253 var parser = new _globalWindow2['default'].WebVTT.Parser(_globalWindow2['default'], _globalWindow2['default'].vttjs, decoder);
8254
8255 segmentInfo.cues = [];
8256 segmentInfo.timestampmap = { MPEGTS: 0, LOCAL: 0 };
8257
8258 parser.oncue = segmentInfo.cues.push.bind(segmentInfo.cues);
8259 parser.ontimestampmap = function (map) {
8260 return segmentInfo.timestampmap = map;
8261 };
8262 parser.onparsingerror = function (error) {
8263 _videoJs2['default'].log.warn('Error encountered when parsing cues: ' + error.message);
8264 };
8265
8266 if (segmentInfo.segment.map) {
8267 var mapData = segmentInfo.segment.map.bytes;
8268
8269 if (decodeBytesToString) {
8270 mapData = uintToString(mapData);
8271 }
8272
8273 parser.parse(mapData);
8274 }
8275
8276 var segmentData = segmentInfo.bytes;
8277
8278 if (decodeBytesToString) {
8279 segmentData = uintToString(segmentData);
8280 }
8281
8282 parser.parse(segmentData);
8283 parser.flush();
8284 }
8285
8286 /**
8287 * Updates the start and end times of any cues parsed by the WebVTT parser using
8288 * the information parsed from the X-TIMESTAMP-MAP header and a TS to media time mapping
8289 * from the SyncController
8290 *
8291 * @param {Object} segmentInfo
8292 * a segment info object that describes the current segment
8293 * @param {Object} mappingObj
8294 * object containing a mapping from TS to media time
8295 * @param {Object} playlist
8296 * the playlist object containing the segment
8297 * @private
8298 */
8299 }, {
8300 key: 'updateTimeMapping_',
8301 value: function updateTimeMapping_(segmentInfo, mappingObj, playlist) {
8302 var segment = segmentInfo.segment;
8303
8304 if (!mappingObj) {
8305 // If the sync controller does not have a mapping of TS to Media Time for the
8306 // timeline, then we don't have enough information to update the cue
8307 // start/end times
8308 return;
8309 }
8310
8311 if (!segmentInfo.cues.length) {
8312 // If there are no cues, we also do not have enough information to figure out
8313 // segment timing. Mark that the segment contains no cues so we don't re-request
8314 // an empty segment.
8315 segment.empty = true;
8316 return;
8317 }
8318
8319 var timestampmap = segmentInfo.timestampmap;
8320 var diff = timestampmap.MPEGTS / 90000 - timestampmap.LOCAL + mappingObj.mapping;
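      // e.g. (illustrative numbers): an X-TIMESTAMP-MAP of MPEGTS:900000,LOCAL:00:00:00.000
      // with a timeline mapping of -2 gives diff = 900000 / 90000 - 0 + (-2) = 8,
      // so every cue in this segment is shifted 8 seconds later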
8321
8322 segmentInfo.cues.forEach(function (cue) {
8323 // First convert cue time to TS time using the timestamp-map provided within the vtt
8324 cue.startTime += diff;
8325 cue.endTime += diff;
8326 });
8327
8328 if (!playlist.syncInfo) {
8329 var firstStart = segmentInfo.cues[0].startTime;
8330 var lastStart = segmentInfo.cues[segmentInfo.cues.length - 1].startTime;
8331
8332 playlist.syncInfo = {
8333 mediaSequence: playlist.mediaSequence + segmentInfo.mediaIndex,
8334 time: Math.min(firstStart, lastStart - segment.duration)
8335 };
8336 }
8337 }
8338 }]);
8339
8340 return VTTSegmentLoader;
8341})(_segmentLoader2['default']);
8342
8343exports['default'] = VTTSegmentLoader;
8344module.exports = exports['default'];
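// Usage sketch (illustrative; `loader` is a placeholder for a constructed
// VTTSegmentLoader and `textTrack` for an existing TextTrack):
//
//   loader.track(textTrack); // loading begins once a playlist has been set
//                            // and the loader is unpaused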
8345}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
8346},{"./bin-utils":2,"./segment-loader":16,"global/window":32,"videojs-contrib-media-sources/es5/remove-cues-from-track.js":72}],21:[function(require,module,exports){
8347(function (global){
8348/**
8349 * @file xhr.js
8350 */
8351
8352/**
8353 * A wrapper for videojs.xhr that tracks bandwidth.
8354 *
8355 * @param {Object} options options for the XHR
8356 * @param {Function} callback the callback to call when done
8357 * @return {Request} the xhr request that is going to be made
8358 */
8359'use strict';
8360
8361Object.defineProperty(exports, '__esModule', {
8362 value: true
8363});
8364
8365function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
8366
8367var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
8368
8369var _videoJs2 = _interopRequireDefault(_videoJs);
8370
8371var xhrFactory = function xhrFactory() {
8372 var xhr = function XhrFunction(options, callback) {
8373 // Add a default timeout for all hls requests
8374 options = (0, _videoJs.mergeOptions)({
8375 timeout: 45e3
8376 }, options);
8377
8378 // Allow an optional user-specified function to modify the option
8379 // object before we construct the xhr request
8380 var beforeRequest = XhrFunction.beforeRequest || _videoJs2['default'].Hls.xhr.beforeRequest;
8381
8382 if (beforeRequest && typeof beforeRequest === 'function') {
8383 var newOptions = beforeRequest(options);
8384
8385 if (newOptions) {
8386 options = newOptions;
8387 }
8388 }
8389
8390 var request = (0, _videoJs.xhr)(options, function (error, response) {
8391 var reqResponse = request.response;
8392
8393 if (!error && reqResponse) {
8394 request.responseTime = Date.now();
8395 request.roundTripTime = request.responseTime - request.requestTime;
8396 request.bytesReceived = reqResponse.byteLength || reqResponse.length;
8397 if (!request.bandwidth) {
8398 request.bandwidth = Math.floor(request.bytesReceived / request.roundTripTime * 8 * 1000);
8399 }
8400 }
8401
8402 // videojs.xhr now uses a specific code on the error
8403 // object to signal that a request has timed out instead
8404 // of setting a boolean on the request object
8405 if (error && error.code === 'ETIMEDOUT') {
8406 request.timedout = true;
8407 }
8408
8409 // videojs.xhr no longer considers status codes outside of 200 and 0
8410 // (for file uris) to be errors, but the old XHR did, so emulate that
8411 // behavior. Status 206 may be used in response to byterange requests.
8412 if (!error && !request.aborted && response.statusCode !== 200 && response.statusCode !== 206 && response.statusCode !== 0) {
8413 error = new Error('XHR Failed with a response of: ' + (request && (reqResponse || request.responseText)));
8414 }
8415
8416 callback(error, request);
8417 });
8418 var originalAbort = request.abort;
8419
8420 request.abort = function () {
8421 request.aborted = true;
8422 return originalAbort.apply(request, arguments);
8423 };
8424 request.uri = options.uri;
8425 request.requestTime = Date.now();
8426 return request;
8427 };
8428
8429 return xhr;
8430};
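// Usage sketch (illustrative; `manifestUri` is a placeholder):
//
//   var xhr = xhrFactory();
//
//   xhr({ uri: manifestUri, responseType: 'arraybuffer' }, function(error, request) {
//     // e.g. 1,000,000 bytes received over a 2,000ms round trip yields
//     // bandwidth = floor(1000000 / 2000 * 8 * 1000) = 4,000,000 bits/sec
//     console.log(error || request.bandwidth);
//   });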
8431
8432exports['default'] = xhrFactory;
8433module.exports = exports['default'];
8434}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
8435},{}],22:[function(require,module,exports){
8436/**
8437 * @file aes.js
8438 *
8439 * This file contains an adaptation of the AES decryption algorithm
8440 * from the Stanford JavaScript Cryptography Library. That work is
8441 * covered by the following copyright and permissions notice:
8442 *
8443 * Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
8444 * All rights reserved.
8445 *
8446 * Redistribution and use in source and binary forms, with or without
8447 * modification, are permitted provided that the following conditions are
8448 * met:
8449 *
8450 * 1. Redistributions of source code must retain the above copyright
8451 * notice, this list of conditions and the following disclaimer.
8452 *
8453 * 2. Redistributions in binary form must reproduce the above
8454 * copyright notice, this list of conditions and the following
8455 * disclaimer in the documentation and/or other materials provided
8456 * with the distribution.
8457 *
8458 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
8459 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
8460 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
8461 * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE
8462 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
8463 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
8464 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
8465 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
8466 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
8467 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
8468 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
8469 *
8470 * The views and conclusions contained in the software and documentation
8471 * are those of the authors and should not be interpreted as representing
8472 * official policies, either expressed or implied, of the authors.
8473 */
8474
8475/**
8476 * Expand the S-box tables.
8477 *
8478 * @private
8479 */
8480'use strict';
8481
8482Object.defineProperty(exports, '__esModule', {
8483 value: true
8484});
8485
8486var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
8487
8488function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
8489
8490var precompute = function precompute() {
8491 var tables = [[[], [], [], [], []], [[], [], [], [], []]];
8492 var encTable = tables[0];
8493 var decTable = tables[1];
8494 var sbox = encTable[4];
8495 var sboxInv = decTable[4];
8496 var i = undefined;
8497 var x = undefined;
8498 var xInv = undefined;
8499 var d = [];
8500 var th = [];
8501 var x2 = undefined;
8502 var x4 = undefined;
8503 var x8 = undefined;
8504 var s = undefined;
8505 var tEnc = undefined;
8506 var tDec = undefined;
8507
8508 // Compute double and third tables
8509 for (i = 0; i < 256; i++) {
8510 th[(d[i] = i << 1 ^ (i >> 7) * 283) ^ i] = i;
8511 }
8512
8513 for (x = xInv = 0; !sbox[x]; x ^= x2 || 1, xInv = th[xInv] || 1) {
8514 // Compute sbox
8515 s = xInv ^ xInv << 1 ^ xInv << 2 ^ xInv << 3 ^ xInv << 4;
8516 s = s >> 8 ^ s & 255 ^ 99;
8517 sbox[x] = s;
8518 sboxInv[s] = x;
8519
8520 // Compute MixColumns
8521 x8 = d[x4 = d[x2 = d[x]]];
8522 tDec = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
8523 tEnc = d[s] * 0x101 ^ s * 0x1010100;
8524
8525 for (i = 0; i < 4; i++) {
8526 encTable[i][x] = tEnc = tEnc << 24 ^ tEnc >>> 8;
8527 decTable[i][s] = tDec = tDec << 24 ^ tDec >>> 8;
8528 }
8529 }
8530
8531 // Compactify. Considerable speedup on Firefox.
8532 for (i = 0; i < 5; i++) {
8533 encTable[i] = encTable[i].slice(0);
8534 decTable[i] = decTable[i].slice(0);
8535 }
8536 return tables;
8537};
8538var aesTables = null;
8539
8540/**
8541 * Schedule out an AES key for both encryption and decryption. This
8542 * is a low-level class. Use a cipher mode to do bulk encryption.
8543 *
8544 * @class AES
8545 * @param key {Array} The key as an array of 4, 6 or 8 words.
8546 */
8547
8548var AES = (function () {
8549 function AES(key) {
8550 _classCallCheck(this, AES);
8551
8552 /**
8553 * The expanded S-box and inverse S-box tables. These will be computed
8554 * on the client so that we don't have to send them down the wire.
8555 *
8556 * There are two tables, _tables[0] is for encryption and
8557 * _tables[1] is for decryption.
8558 *
8559 * The first 4 sub-tables are the expanded S-box with MixColumns. The
8560      * last sub-table in each (_tables[0][4] and _tables[1][4]) is the S-box itself.
8561 *
8562 * @private
8563 */
8564 // if we have yet to precompute the S-box tables
8565 // do so now
8566 if (!aesTables) {
8567 aesTables = precompute();
8568 }
8569 // then make a copy of that object for use
8570 this._tables = [[aesTables[0][0].slice(), aesTables[0][1].slice(), aesTables[0][2].slice(), aesTables[0][3].slice(), aesTables[0][4].slice()], [aesTables[1][0].slice(), aesTables[1][1].slice(), aesTables[1][2].slice(), aesTables[1][3].slice(), aesTables[1][4].slice()]];
8571 var i = undefined;
8572 var j = undefined;
8573 var tmp = undefined;
8574 var encKey = undefined;
8575 var decKey = undefined;
8576 var sbox = this._tables[0][4];
8577 var decTable = this._tables[1];
8578 var keyLen = key.length;
8579 var rcon = 1;
8580
8581 if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
8582 throw new Error('Invalid aes key size');
8583 }
8584
8585 encKey = key.slice(0);
8586 decKey = [];
8587 this._key = [encKey, decKey];
8588
8589 // schedule encryption keys
8590 for (i = keyLen; i < 4 * keyLen + 28; i++) {
8591 tmp = encKey[i - 1];
8592
8593 // apply sbox
8594 if (i % keyLen === 0 || keyLen === 8 && i % keyLen === 4) {
8595 tmp = sbox[tmp >>> 24] << 24 ^ sbox[tmp >> 16 & 255] << 16 ^ sbox[tmp >> 8 & 255] << 8 ^ sbox[tmp & 255];
8596
8597 // shift rows and add rcon
8598 if (i % keyLen === 0) {
8599 tmp = tmp << 8 ^ tmp >>> 24 ^ rcon << 24;
8600 rcon = rcon << 1 ^ (rcon >> 7) * 283;
8601 }
8602 }
8603
8604 encKey[i] = encKey[i - keyLen] ^ tmp;
8605 }
8606
8607 // schedule decryption keys
8608 for (j = 0; i; j++, i--) {
8609 tmp = encKey[j & 3 ? i : i - 4];
8610 if (i <= 4 || j < 4) {
8611 decKey[j] = tmp;
8612 } else {
8613 decKey[j] = decTable[0][sbox[tmp >>> 24]] ^ decTable[1][sbox[tmp >> 16 & 255]] ^ decTable[2][sbox[tmp >> 8 & 255]] ^ decTable[3][sbox[tmp & 255]];
8614 }
8615 }
8616 }
8617
8618 /**
8619 * Decrypt 16 bytes, specified as four 32-bit words.
8620 *
8621 * @param {Number} encrypted0 the first word to decrypt
8622 * @param {Number} encrypted1 the second word to decrypt
8623 * @param {Number} encrypted2 the third word to decrypt
8624 * @param {Number} encrypted3 the fourth word to decrypt
8625 * @param {Int32Array} out the array to write the decrypted words
8626 * into
8627 * @param {Number} offset the offset into the output array to start
8628 * writing results
8629 * @return {Array} The plaintext.
8630 */
8631
8632 _createClass(AES, [{
8633 key: 'decrypt',
8634 value: function decrypt(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {
8635 var key = this._key[1];
8636 // state variables a,b,c,d are loaded with pre-whitened data
8637 var a = encrypted0 ^ key[0];
8638 var b = encrypted3 ^ key[1];
8639 var c = encrypted2 ^ key[2];
8640 var d = encrypted1 ^ key[3];
8641 var a2 = undefined;
8642 var b2 = undefined;
8643 var c2 = undefined;
8644
8645      // the number of inner rounds is derived from the expanded key length, e.g. 44 / 4 - 2 = 9 for AES-128
8646 var nInnerRounds = key.length / 4 - 2;
8647 var i = undefined;
8648 var kIndex = 4;
8649 var table = this._tables[1];
8650
8651 // load up the tables
8652 var table0 = table[0];
8653 var table1 = table[1];
8654 var table2 = table[2];
8655 var table3 = table[3];
8656 var sbox = table[4];
8657
8658 // Inner rounds. Cribbed from OpenSSL.
8659 for (i = 0; i < nInnerRounds; i++) {
8660 a2 = table0[a >>> 24] ^ table1[b >> 16 & 255] ^ table2[c >> 8 & 255] ^ table3[d & 255] ^ key[kIndex];
8661 b2 = table0[b >>> 24] ^ table1[c >> 16 & 255] ^ table2[d >> 8 & 255] ^ table3[a & 255] ^ key[kIndex + 1];
8662 c2 = table0[c >>> 24] ^ table1[d >> 16 & 255] ^ table2[a >> 8 & 255] ^ table3[b & 255] ^ key[kIndex + 2];
8663 d = table0[d >>> 24] ^ table1[a >> 16 & 255] ^ table2[b >> 8 & 255] ^ table3[c & 255] ^ key[kIndex + 3];
8664 kIndex += 4;
8665        a = a2; b = b2; c = c2;
8666 }
8667
8668 // Last round.
8669 for (i = 0; i < 4; i++) {
8670 out[(3 & -i) + offset] = sbox[a >>> 24] << 24 ^ sbox[b >> 16 & 255] << 16 ^ sbox[c >> 8 & 255] << 8 ^ sbox[d & 255] ^ key[kIndex++];
8671        a2 = a; a = b; b = c; c = d; d = a2;
8672 }
8673 }
8674 }]);
8675
8676 return AES;
8677})();
8678
8679exports['default'] = AES;
8680module.exports = exports['default'];
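// Usage sketch (illustrative): decrypt a single 16-byte block with a 128-bit
// key given as four 32-bit words; `word0`..`word3` are placeholder ciphertext words.
//
//   var aes = new AES([0x00010203, 0x04050607, 0x08090a0b, 0x0c0d0e0f]);
//   var out = new Int32Array(4);
//
//   aes.decrypt(word0, word1, word2, word3, out, 0);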
8681},{}],23:[function(require,module,exports){
8682/**
8683 * @file async-stream.js
8684 */
8685'use strict';
8686
8687Object.defineProperty(exports, '__esModule', {
8688 value: true
8689});
8690
8691var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
8692
8693var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
8694
8695function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
8696
8697function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
8698
8699function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
8700
8701var _stream = require('./stream');
8702
8703var _stream2 = _interopRequireDefault(_stream);
8704
8705/**
8706 * A wrapper around the Stream class that uses setTimeout
8707 * to run stream "jobs" asynchronously
8708 *
8709 * @class AsyncStream
8710 * @extends Stream
8711 */
8712
8713var AsyncStream = (function (_Stream) {
8714 _inherits(AsyncStream, _Stream);
8715
8716 function AsyncStream() {
8717 _classCallCheck(this, AsyncStream);
8718
8719 _get(Object.getPrototypeOf(AsyncStream.prototype), 'constructor', this).call(this, _stream2['default']);
8720 this.jobs = [];
8721 this.delay = 1;
8722 this.timeout_ = null;
8723 }
8724
8725 /**
8726 * process an async job
8727 *
8728 * @private
8729 */
8730
8731 _createClass(AsyncStream, [{
8732 key: 'processJob_',
8733 value: function processJob_() {
8734 this.jobs.shift()();
8735 if (this.jobs.length) {
8736 this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
8737 } else {
8738 this.timeout_ = null;
8739 }
8740 }
8741
8742 /**
8743 * push a job into the stream
8744 *
8745 * @param {Function} job the job to push into the stream
8746 */
8747 }, {
8748 key: 'push',
8749 value: function push(job) {
8750 this.jobs.push(job);
8751 if (!this.timeout_) {
8752 this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
8753 }
8754 }
8755 }]);
8756
8757 return AsyncStream;
8758})(_stream2['default']);
8759
8760exports['default'] = AsyncStream;
8761module.exports = exports['default'];
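// Usage sketch (illustrative): queued jobs run one per timer tick instead of
// blocking the main thread.
//
//   var stream = new AsyncStream();
//
//   stream.push(function() { /* decrypt chunk 1 */ });
//   stream.push(function() { /* decrypt chunk 2, roughly 1ms later */ });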
8762},{"./stream":26}],24:[function(require,module,exports){
8763/**
8764 * @file decrypter.js
8765 *
8766 * An asynchronous implementation of AES-128 CBC decryption with
8767 * PKCS#7 padding.
8768 */
8769
8770'use strict';
8771
8772Object.defineProperty(exports, '__esModule', {
8773 value: true
8774});
8775
8776var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
8777
8778function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
8779
8780function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
8781
8782var _aes = require('./aes');
8783
8784var _aes2 = _interopRequireDefault(_aes);
8785
8786var _asyncStream = require('./async-stream');
8787
8788var _asyncStream2 = _interopRequireDefault(_asyncStream);
8789
8790var _pkcs7 = require('pkcs7');
8791
8792/**
8793 * Convert network-order (big-endian) bytes into their little-endian
8794 * representation.
8795 */
8796var ntoh = function ntoh(word) {
8797 return word << 24 | (word & 0xff00) << 8 | (word & 0xff0000) >> 8 | word >>> 24;
8798};
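// e.g. ntoh(0x11223344) === 0x44332211 (byte order reversed)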
8799
8800/**
8801 * Decrypt bytes using AES-128 with CBC and PKCS#7 padding.
8802 *
8803 * @param {Uint8Array} encrypted the encrypted bytes
8804 * @param {Uint32Array} key the bytes of the decryption key
8805 * @param {Uint32Array} initVector the initialization vector (IV) to
8806 * use for the first round of CBC.
8807 * @return {Uint8Array} the decrypted bytes
8808 *
8809 * @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
8810 * @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29
8811 * @see https://tools.ietf.org/html/rfc2315
8812 */
8813var decrypt = function decrypt(encrypted, key, initVector) {
8814 // word-level access to the encrypted bytes
8815 var encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2);
8816
8817 var decipher = new _aes2['default'](Array.prototype.slice.call(key));
8818
8819 // byte and word-level access for the decrypted output
8820 var decrypted = new Uint8Array(encrypted.byteLength);
8821 var decrypted32 = new Int32Array(decrypted.buffer);
8822
8823 // temporary variables for working with the IV, encrypted, and
8824 // decrypted data
8825 var init0 = undefined;
8826 var init1 = undefined;
8827 var init2 = undefined;
8828 var init3 = undefined;
8829 var encrypted0 = undefined;
8830 var encrypted1 = undefined;
8831 var encrypted2 = undefined;
8832 var encrypted3 = undefined;
8833
8834 // iteration variable
8835 var wordIx = undefined;
8836
8837 // pull out the words of the IV to ensure we don't modify the
8838  // passed-in reference and for easier access
8839 init0 = initVector[0];
8840 init1 = initVector[1];
8841 init2 = initVector[2];
8842 init3 = initVector[3];
8843
8844 // decrypt four word sequences, applying cipher-block chaining (CBC)
8845 // to each decrypted block
8846 for (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {
8847 // convert big-endian (network order) words into little-endian
8848 // (javascript order)
8849 encrypted0 = ntoh(encrypted32[wordIx]);
8850 encrypted1 = ntoh(encrypted32[wordIx + 1]);
8851 encrypted2 = ntoh(encrypted32[wordIx + 2]);
8852 encrypted3 = ntoh(encrypted32[wordIx + 3]);
8853
8854 // decrypt the block
8855 decipher.decrypt(encrypted0, encrypted1, encrypted2, encrypted3, decrypted32, wordIx);
8856
8857 // XOR with the IV, and restore network byte-order to obtain the
8858 // plaintext
8859 decrypted32[wordIx] = ntoh(decrypted32[wordIx] ^ init0);
8860 decrypted32[wordIx + 1] = ntoh(decrypted32[wordIx + 1] ^ init1);
8861 decrypted32[wordIx + 2] = ntoh(decrypted32[wordIx + 2] ^ init2);
8862 decrypted32[wordIx + 3] = ntoh(decrypted32[wordIx + 3] ^ init3);
8863
8864 // setup the IV for the next round
8865 init0 = encrypted0;
8866 init1 = encrypted1;
8867 init2 = encrypted2;
8868 init3 = encrypted3;
8869 }
8870
8871 return decrypted;
8872};
8873
8874exports.decrypt = decrypt;
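// Usage sketch (illustrative; `encryptedBytes` is a placeholder for a Uint8Array
// whose length is a multiple of 16, `keyWords` and `ivWords` for four-word
// Uint32Arrays). The synchronous form decrypts in a single pass and leaves the
// PKCS#7 padding in place:
//
//   var paddedBytes = decrypt(encryptedBytes, keyWords, ivWords);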
8875/**
8876 * The `Decrypter` class that manages decryption of AES
8877 * data through `AsyncStream` objects and the `decrypt`
8878 * function
8879 *
8880 * @param {Uint8Array} encrypted the encrypted bytes
8881 * @param {Uint32Array} key the bytes of the decryption key
8882 * @param {Uint32Array} initVector the initialization vector (IV) to use for the first round of CBC
8883 * @param {Function} done the function to run when done
8884 * @class Decrypter
8885 */
8886
8887var Decrypter = (function () {
8888 function Decrypter(encrypted, key, initVector, done) {
8889 _classCallCheck(this, Decrypter);
8890
8891 var step = Decrypter.STEP;
8892 var encrypted32 = new Int32Array(encrypted.buffer);
8893 var decrypted = new Uint8Array(encrypted.byteLength);
8894 var i = 0;
8895
8896 this.asyncStream_ = new _asyncStream2['default']();
8897
8898 // split up the encryption job and do the individual chunks asynchronously
8899 this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
8900 for (i = step; i < encrypted32.length; i += step) {
8901 initVector = new Uint32Array([ntoh(encrypted32[i - 4]), ntoh(encrypted32[i - 3]), ntoh(encrypted32[i - 2]), ntoh(encrypted32[i - 1])]);
8902 this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
8903 }
8904 // invoke the done() callback when everything is finished
8905 this.asyncStream_.push(function () {
8906 // remove pkcs#7 padding from the decrypted bytes
8907 done(null, (0, _pkcs7.unpad)(decrypted));
8908 });
8909 }
8910
8911 /**
8912   * A getter for STEP, the maximum number of 32-bit words to process at one time
8913   *
8914   * @return {Number} the value of STEP, 32000
8915 */
8916
8917 _createClass(Decrypter, [{
8918 key: 'decryptChunk_',
8919
8920 /**
8921 * @private
8922 */
8923 value: function decryptChunk_(encrypted, key, initVector, decrypted) {
8924 return function () {
8925 var bytes = decrypt(encrypted, key, initVector);
8926
8927 decrypted.set(bytes, encrypted.byteOffset);
8928 };
8929 }
8930 }], [{
8931 key: 'STEP',
8932 get: function get() {
8933 // 4 * 8000;
8934 return 32000;
8935 }
8936 }]);
8937
8938 return Decrypter;
8939})();
8940
8941exports.Decrypter = Decrypter;
8942exports['default'] = {
8943 Decrypter: Decrypter,
8944 decrypt: decrypt
8945};
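// Usage sketch (illustrative; same placeholder inputs as `decrypt` above). The
// asynchronous form chunks the work and unpads the result:
//
//   new Decrypter(encryptedBytes, keyWords, ivWords, function(error, bytes) {
//     // `bytes` is a Uint8Array with the PKCS#7 padding already removed
//   });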
8946},{"./aes":22,"./async-stream":23,"pkcs7":28}],25:[function(require,module,exports){
8947/**
8948 * @file index.js
8949 *
8950 * Index module to easily import the primary components of AES-128
8951 * decryption. Like this:
8952 *
8953 * ```js
8954 * import {Decrypter, decrypt, AsyncStream} from 'aes-decrypter';
8955 * ```
8956 */
8957'use strict';
8958
8959Object.defineProperty(exports, '__esModule', {
8960 value: true
8961});
8962
8963function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
8964
8965var _decrypter = require('./decrypter');
8966
8967var _asyncStream = require('./async-stream');
8968
8969var _asyncStream2 = _interopRequireDefault(_asyncStream);
8970
8971exports['default'] = {
8972 decrypt: _decrypter.decrypt,
8973 Decrypter: _decrypter.Decrypter,
8974 AsyncStream: _asyncStream2['default']
8975};
8976module.exports = exports['default'];
8977},{"./async-stream":23,"./decrypter":24}],26:[function(require,module,exports){
8978/**
8979 * @file stream.js
8980 */
8981/**
8982 * A lightweight readable stream implementation that handles event dispatching.
8983 *
8984 * @class Stream
8985 */
8986'use strict';
8987
8988Object.defineProperty(exports, '__esModule', {
8989 value: true
8990});
8991
8992var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
8993
8994function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
8995
8996var Stream = (function () {
8997 function Stream() {
8998 _classCallCheck(this, Stream);
8999
9000 this.listeners = {};
9001 }
9002
9003 /**
9004 * Add a listener for a specified event type.
9005 *
9006 * @param {String} type the event name
9007 * @param {Function} listener the callback to be invoked when an event of
9008 * the specified type occurs
9009 */
9010
9011 _createClass(Stream, [{
9012 key: 'on',
9013 value: function on(type, listener) {
9014 if (!this.listeners[type]) {
9015 this.listeners[type] = [];
9016 }
9017 this.listeners[type].push(listener);
9018 }
9019
9020 /**
9021 * Remove a listener for a specified event type.
9022 *
9023 * @param {String} type the event name
9024 * @param {Function} listener a function previously registered for this
9025 * type of event through `on`
9026    * @return {Boolean} whether the listener was found and removed
9027 */
9028 }, {
9029 key: 'off',
9030 value: function off(type, listener) {
9031 var index = undefined;
9032
9033 if (!this.listeners[type]) {
9034 return false;
9035 }
9036 index = this.listeners[type].indexOf(listener);
9037      if (index > -1) { this.listeners[type].splice(index, 1); } // guard: splice(-1, 1) would remove the wrong listener
9038 return index > -1;
9039 }
9040
9041 /**
9042 * Trigger an event of the specified type on this stream. Any additional
9043 * arguments to this function are passed as parameters to event listeners.
9044 *
9045 * @param {String} type the event name
9046 */
9047 }, {
9048 key: 'trigger',
9049 value: function trigger(type) {
9050 var callbacks = undefined;
9051 var i = undefined;
9052 var length = undefined;
9053 var args = undefined;
9054
9055 callbacks = this.listeners[type];
9056 if (!callbacks) {
9057 return;
9058 }
9059 // Slicing the arguments on every invocation of this method
9060 // can add a significant amount of overhead. Avoid the
9061 // intermediate object creation for the common case of a
9062 // single callback argument
9063 if (arguments.length === 2) {
9064 length = callbacks.length;
9065 for (i = 0; i < length; ++i) {
9066 callbacks[i].call(this, arguments[1]);
9067 }
9068 } else {
9069 args = Array.prototype.slice.call(arguments, 1);
9070 length = callbacks.length;
9071 for (i = 0; i < length; ++i) {
9072 callbacks[i].apply(this, args);
9073 }
9074 }
9075 }
9076
9077 /**
9078 * Destroys the stream and cleans up.
9079 */
9080 }, {
9081 key: 'dispose',
9082 value: function dispose() {
9083 this.listeners = {};
9084 }
9085
9086 /**
9087 * Forwards all `data` events on this stream to the destination stream. The
9088 * destination stream should provide a method `push` to receive the data
9089 * events as they arrive.
9090 *
9091 * @param {Stream} destination the stream that will receive all `data` events
9092 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
9093 */
9094 }, {
9095 key: 'pipe',
9096 value: function pipe(destination) {
9097 this.on('data', function (data) {
9098 destination.push(data);
9099 });
9100 }
9101 }]);
9102
9103 return Stream;
9104})();
9105
9106exports['default'] = Stream;
9107module.exports = exports['default'];
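// Usage sketch (illustrative):
//
//   var stream = new Stream();
//
//   stream.on('data', function(data) { console.log(data); });
//   stream.trigger('data', 'hello'); // logs 'hello'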
9108},{}],27:[function(require,module,exports){
9109/*
9110 * pkcs7.pad
9111 * https://github.com/brightcove/pkcs7
9112 *
9113 * Copyright (c) 2014 Brightcove
9114 * Licensed under the apache2 license.
9115 */
9116
9117'use strict';
9118
9119var PADDING;
9120
9121/**
9122 * Returns a new Uint8Array that is padded with PKCS#7 padding.
9123 * @param plaintext {Uint8Array} the input bytes before encryption
9124 * @return {Uint8Array} the padded bytes
9125 * @see http://tools.ietf.org/html/rfc5652
9126 */
9127module.exports = function pad(plaintext) {
9128 var padding = PADDING[(plaintext.byteLength % 16) || 0],
9129 result = new Uint8Array(plaintext.byteLength + padding.length);
9130 result.set(plaintext);
9131 result.set(padding, plaintext.byteLength);
9132 return result;
9133};
9134
9135// pre-define the padding values
9136PADDING = [
9137 [16, 16, 16, 16,
9138 16, 16, 16, 16,
9139 16, 16, 16, 16,
9140 16, 16, 16, 16],
9141
9142 [15, 15, 15, 15,
9143 15, 15, 15, 15,
9144 15, 15, 15, 15,
9145 15, 15, 15],
9146
9147 [14, 14, 14, 14,
9148 14, 14, 14, 14,
9149 14, 14, 14, 14,
9150 14, 14],
9151
9152 [13, 13, 13, 13,
9153 13, 13, 13, 13,
9154 13, 13, 13, 13,
9155 13],
9156
9157 [12, 12, 12, 12,
9158 12, 12, 12, 12,
9159 12, 12, 12, 12],
9160
9161 [11, 11, 11, 11,
9162 11, 11, 11, 11,
9163 11, 11, 11],
9164
9165 [10, 10, 10, 10,
9166 10, 10, 10, 10,
9167 10, 10],
9168
9169 [9, 9, 9, 9,
9170 9, 9, 9, 9,
9171 9],
9172
9173 [8, 8, 8, 8,
9174 8, 8, 8, 8],
9175
9176 [7, 7, 7, 7,
9177 7, 7, 7],
9178
9179 [6, 6, 6, 6,
9180 6, 6],
9181
9182 [5, 5, 5, 5,
9183 5],
9184
9185 [4, 4, 4, 4],
9186
9187 [3, 3, 3],
9188
9189 [2, 2],
9190
9191 [1]
9192];
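// e.g. a 13-byte plaintext selects PADDING[13] = [3, 3, 3], producing a
// 16-byte result that ends in three bytes of value 3; a 16-byte plaintext
// gains a full extra block of sixteen 16s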
9193
9194},{}],28:[function(require,module,exports){
9195/*
9196 * pkcs7
9197 * https://github.com/brightcove/pkcs7
9198 *
9199 * Copyright (c) 2014 Brightcove
9200 * Licensed under the apache2 license.
9201 */
9202
9203'use strict';
9204
9205exports.pad = require('./pad.js');
9206exports.unpad = require('./unpad.js');
9207
9208},{"./pad.js":27,"./unpad.js":29}],29:[function(require,module,exports){
9209/*
9210 * pkcs7.unpad
9211 * https://github.com/brightcove/pkcs7
9212 *
9213 * Copyright (c) 2014 Brightcove
9214 * Licensed under the apache2 license.
9215 */
9216
9217'use strict';
9218
9219/**
9220 * Returns the subarray of a Uint8Array without PKCS#7 padding.
9221 * @param padded {Uint8Array} decrypted bytes that still include PKCS#7 padding
9222 * @return {Uint8Array} the unpadded bytes
9223 * @see http://tools.ietf.org/html/rfc5652
9224 */
9225module.exports = function unpad(padded) {
9226 return padded.subarray(0, padded.byteLength - padded[padded.byteLength - 1]);
9227};
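// e.g. padded bytes ending in [..., 3, 3, 3] have a final byte of 3, so the
// last 3 bytes are treated as padding and sliced off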
9228
9229},{}],30:[function(require,module,exports){
9230
9231},{}],31:[function(require,module,exports){
9232(function (global){
9233var topLevel = typeof global !== 'undefined' ? global :
9234 typeof window !== 'undefined' ? window : {}
9235var minDoc = require('min-document');
9236
9237var doccy;
9238
9239if (typeof document !== 'undefined') {
9240 doccy = document;
9241} else {
9242 doccy = topLevel['__GLOBAL_DOCUMENT_CACHE@4'];
9243
9244 if (!doccy) {
9245 doccy = topLevel['__GLOBAL_DOCUMENT_CACHE@4'] = minDoc;
9246 }
9247}
9248
9249module.exports = doccy;
9250
9251}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
9252},{"min-document":30}],32:[function(require,module,exports){
9253(function (global){
9254var win;
9255
9256if (typeof window !== "undefined") {
9257 win = window;
9258} else if (typeof global !== "undefined") {
9259 win = global;
9260} else if (typeof self !== "undefined"){
9261 win = self;
9262} else {
9263 win = {};
9264}
9265
9266module.exports = win;
9267
9268}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
9269},{}],33:[function(require,module,exports){
9270'use strict';
9271
9272var _lineStream = require('./line-stream');
9273
9274var _lineStream2 = _interopRequireDefault(_lineStream);
9275
9276var _parseStream = require('./parse-stream');
9277
9278var _parseStream2 = _interopRequireDefault(_parseStream);
9279
9280var _parser = require('./parser');
9281
9282var _parser2 = _interopRequireDefault(_parser);
9283
9284function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
9285
9286module.exports = {
9287 LineStream: _lineStream2['default'],
9288 ParseStream: _parseStream2['default'],
9289 Parser: _parser2['default']
9290}; /**
9291 * @file m3u8/index.js
9292 *
9293 * Utilities for parsing M3U8 files. If the entire manifest is available,
9294 * `Parser` will create an object representation with enough detail for managing
9295 * playback. `ParseStream` and `LineStream` are lower-level parsing primitives
9296 * that do not assume the entirety of the manifest is ready and expose a
9297 * ReadableStream-like interface.
9298 */
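// Usage sketch (illustrative; `manifestText` is a placeholder for the full
// M3U8 body, fed to the `Parser` export):
//
//   var parser = new Parser();
//
//   parser.push(manifestText);
//   parser.end();
//   // parser.manifest now holds the object representation of the playlist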
9299},{"./line-stream":34,"./parse-stream":35,"./parser":36}],34:[function(require,module,exports){
9300'use strict';
9301
9302Object.defineProperty(exports, "__esModule", {
9303 value: true
9304});
9305
9306var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
9307
9308var _stream = require('./stream');
9309
9310var _stream2 = _interopRequireDefault(_stream);
9311
9312function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
9313
9314function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
9315
9316function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
9317
9318function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /**
9319 * @file m3u8/line-stream.js
9320 */
9321
9322
9323/**
9324 * A stream that buffers string input and generates a `data` event for each
9325 * line.
9326 *
9327 * @class LineStream
9328 * @extends Stream
9329 */
9330var LineStream = function (_Stream) {
9331 _inherits(LineStream, _Stream);
9332
9333 function LineStream() {
9334 _classCallCheck(this, LineStream);
9335
9336 var _this = _possibleConstructorReturn(this, (LineStream.__proto__ || Object.getPrototypeOf(LineStream)).call(this));
9337
9338 _this.buffer = '';
9339 return _this;
9340 }
9341
9342 /**
9343 * Add new data to be parsed.
9344 *
9345 * @param {String} data the text to process
9346 */
9347
9348
9349 _createClass(LineStream, [{
9350 key: 'push',
9351 value: function push(data) {
9352 var nextNewline = void 0;
9353
9354 this.buffer += data;
9355 nextNewline = this.buffer.indexOf('\n');
9356
9357 for (; nextNewline > -1; nextNewline = this.buffer.indexOf('\n')) {
9358 this.trigger('data', this.buffer.substring(0, nextNewline));
9359 this.buffer = this.buffer.substring(nextNewline + 1);
9360 }
9361 }
9362 }]);
9363
9364 return LineStream;
9365}(_stream2['default']);
9366
9367exports['default'] = LineStream;
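// Usage sketch (illustrative): each complete line becomes a `data` event.
//
//   var lineStream = new LineStream();
//
//   lineStream.on('data', function(line) { console.log(line); });
//   lineStream.push('#EXTM3U\n#EXT-X-VERSION:3\n'); // logs both lines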
9368},{"./stream":37}],35:[function(require,module,exports){
9369'use strict';
9370
9371Object.defineProperty(exports, "__esModule", {
9372 value: true
9373});
9374
9375var _slicedToArray = function () { function sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"]) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } }; }();
9376
9377var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
9378
9379var _stream = require('./stream');
9380
9381var _stream2 = _interopRequireDefault(_stream);
9382
9383function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
9384
9385function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
9386
9387function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
9388
9389function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /**
9390 * @file m3u8/parse-stream.js
9391 */
9392
9393
9394/**
9395 * "forgiving" attribute list pseudo-grammar:
9396 * attributes -> keyvalue (',' keyvalue)*
9397 * keyvalue -> key '=' value
9398 * key -> [^=]*
9399 * value -> '"' [^"]* '"' | [^,]*
9400 */
9401var attributeSeparator = function attributeSeparator() {
9402 var key = '[^=]*';
9403 var value = '"[^"]*"|[^,]*';
9404 var keyvalue = '(?:' + key + ')=(?:' + value + ')';
9405
9406 return new RegExp('(?:^|,)(' + keyvalue + ')');
9407};
9408
9409/**
9410 * Parse attributes from a line given the separator
9411 *
9412 * @param {String} attributes the attribute line to parse
9413 */
9414var parseAttributes = function parseAttributes(attributes) {
9415 // split the string using attributes as the separator
9416 var attrs = attributes.split(attributeSeparator());
9417 var result = {};
9418 var i = attrs.length;
9419 var attr = void 0;
9420
9421 while (i--) {
9422 // filter out unmatched portions of the string
9423 if (attrs[i] === '') {
9424 continue;
9425 }
9426
9427 // split the key and value
9428 attr = /([^=]*)=(.*)/.exec(attrs[i]).slice(1);
9429 // trim whitespace and remove optional quotes around the value
9430 attr[0] = attr[0].replace(/^\s+|\s+$/g, '');
9431 attr[1] = attr[1].replace(/^\s+|\s+$/g, '');
9432 attr[1] = attr[1].replace(/^['"](.*)['"]$/g, '$1');
9433 result[attr[0]] = attr[1];
9434 }
9435 return result;
9436};
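// e.g. (illustrative): quoted values may contain commas and keep them intact,
// while the surrounding quotes are stripped.
//
//   parseAttributes('BANDWIDTH=1280000,CODECS="mp4a.40.2,avc1.4d401e"')
//   // => { BANDWIDTH: '1280000', CODECS: 'mp4a.40.2,avc1.4d401e' }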
9437
9438/**
9439 * A line-level M3U8 parser event stream. It expects to receive input one
9440 * line at a time and performs a context-free parse of its contents. A stream
9441 * interpretation of a manifest can be useful if the manifest is expected to
9442 * be too large to fit comfortably into memory or the entirety of the input
9443 * is not immediately available. Otherwise, it's probably much easier to work
9444 * with a regular `Parser` object.
9445 *
9446 * Produces `data` events with an object that captures the parser's
9447 * interpretation of the input. That object has a property `type` that is one
9448 * of `uri`, `comment`, or `tag`. URIs only have a single additional
9449 * property, `uri`, which captures the entirety of the input without
9450 * interpretation. Comments similarly have a single additional property
9451 * `text` which is the input without the leading `#`.
9452 *
9453 * Tags always have a property `tagType` which is the lower-cased version of
9454 * the M3U8 directive without the `#EXT` or `#EXT-X-` prefix. For instance,
9455 * `#EXT-X-MEDIA-SEQUENCE` becomes `media-sequence` when parsed. Unrecognized
9456 * tags produce a `tag` event with no `tagType` and a single additional property
9457 * `data` with the remainder of the input.
9458 *
9459 * @class ParseStream
9460 * @extends Stream
9461 */
9462
9463var ParseStream = function (_Stream) {
9464 _inherits(ParseStream, _Stream);
9465
9466 function ParseStream() {
9467 _classCallCheck(this, ParseStream);
9468
9469 return _possibleConstructorReturn(this, (ParseStream.__proto__ || Object.getPrototypeOf(ParseStream)).call(this));
9470 }
9471
9472 /**
9473 * Parses an additional line of input.
9474 *
9475 * @param {String} line a single line of an M3U8 file to parse
9476 */
9477
9478
9479 _createClass(ParseStream, [{
9480 key: 'push',
9481 value: function push(line) {
9482 var match = void 0;
9483 var event = void 0;
9484
9485 // strip whitespace
9486 line = line.replace(/^[\u0000\s]+|[\u0000\s]+$/g, '');
9487 if (line.length === 0) {
9488 // ignore empty lines
9489 return;
9490 }
9491
9492 // URIs
9493 if (line[0] !== '#') {
9494 this.trigger('data', {
9495 type: 'uri',
9496 uri: line
9497 });
9498 return;
9499 }
9500
9501 // Comments
9502 if (line.indexOf('#EXT') !== 0) {
9503 this.trigger('data', {
9504 type: 'comment',
9505 text: line.slice(1)
9506 });
9507 return;
9508 }
9509
9510 // strip off any carriage returns here so the regex matching
9511 // doesn't have to account for them.
9512 line = line.replace('\r', '');
9513
9514 // Tags
9515 match = /^#EXTM3U/.exec(line);
9516 if (match) {
9517 this.trigger('data', {
9518 type: 'tag',
9519 tagType: 'm3u'
9520 });
9521 return;
9522 }
9523 match = /^#EXTINF:?([0-9\.]*)?,?(.*)?$/.exec(line);
9524 if (match) {
9525 event = {
9526 type: 'tag',
9527 tagType: 'inf'
9528 };
9529 if (match[1]) {
9530 event.duration = parseFloat(match[1]);
9531 }
9532 if (match[2]) {
9533 event.title = match[2];
9534 }
9535 this.trigger('data', event);
9536 return;
9537 }
9538 match = /^#EXT-X-TARGETDURATION:?([0-9.]*)?/.exec(line);
9539 if (match) {
9540 event = {
9541 type: 'tag',
9542 tagType: 'targetduration'
9543 };
9544 if (match[1]) {
9545 event.duration = parseInt(match[1], 10);
9546 }
9547 this.trigger('data', event);
9548 return;
9549 }
9550 match = /^#ZEN-TOTAL-DURATION:?([0-9.]*)?/.exec(line);
9551 if (match) {
9552 event = {
9553 type: 'tag',
9554 tagType: 'totalduration'
9555 };
9556 if (match[1]) {
9557 event.duration = parseInt(match[1], 10);
9558 }
9559 this.trigger('data', event);
9560 return;
9561 }
9562 match = /^#EXT-X-VERSION:?([0-9.]*)?/.exec(line);
9563 if (match) {
9564 event = {
9565 type: 'tag',
9566 tagType: 'version'
9567 };
9568 if (match[1]) {
9569 event.version = parseInt(match[1], 10);
9570 }
9571 this.trigger('data', event);
9572 return;
9573 }
9574 match = /^#EXT-X-MEDIA-SEQUENCE:?(\-?[0-9.]*)?/.exec(line);
9575 if (match) {
9576 event = {
9577 type: 'tag',
9578 tagType: 'media-sequence'
9579 };
9580 if (match[1]) {
9581 event.number = parseInt(match[1], 10);
9582 }
9583 this.trigger('data', event);
9584 return;
9585 }
9586 match = /^#EXT-X-DISCONTINUITY-SEQUENCE:?(\-?[0-9.]*)?/.exec(line);
9587 if (match) {
9588 event = {
9589 type: 'tag',
9590 tagType: 'discontinuity-sequence'
9591 };
9592 if (match[1]) {
9593 event.number = parseInt(match[1], 10);
9594 }
9595 this.trigger('data', event);
9596 return;
9597 }
9598 match = /^#EXT-X-PLAYLIST-TYPE:?(.*)?$/.exec(line);
9599 if (match) {
9600 event = {
9601 type: 'tag',
9602 tagType: 'playlist-type'
9603 };
9604 if (match[1]) {
9605 event.playlistType = match[1];
9606 }
9607 this.trigger('data', event);
9608 return;
9609 }
9610 match = /^#EXT-X-BYTERANGE:?([0-9.]*)?@?([0-9.]*)?/.exec(line);
9611 if (match) {
9612 event = {
9613 type: 'tag',
9614 tagType: 'byterange'
9615 };
9616 if (match[1]) {
9617 event.length = parseInt(match[1], 10);
9618 }
9619 if (match[2]) {
9620 event.offset = parseInt(match[2], 10);
9621 }
9622 this.trigger('data', event);
9623 return;
9624 }
9625 match = /^#EXT-X-ALLOW-CACHE:?(YES|NO)?/.exec(line);
9626 if (match) {
9627 event = {
9628 type: 'tag',
9629 tagType: 'allow-cache'
9630 };
9631 if (match[1]) {
9632 event.allowed = !/NO/.test(match[1]);
9633 }
9634 this.trigger('data', event);
9635 return;
9636 }
9637 match = /^#EXT-X-MAP:?(.*)$/.exec(line);
9638 if (match) {
9639 event = {
9640 type: 'tag',
9641 tagType: 'map'
9642 };
9643
9644 if (match[1]) {
9645 var attributes = parseAttributes(match[1]);
9646
9647 if (attributes.URI) {
9648 event.uri = attributes.URI;
9649 }
9650 if (attributes.BYTERANGE) {
9651 var _attributes$BYTERANGE = attributes.BYTERANGE.split('@'),
9652 _attributes$BYTERANGE2 = _slicedToArray(_attributes$BYTERANGE, 2),
9653 length = _attributes$BYTERANGE2[0],
9654 offset = _attributes$BYTERANGE2[1];
9655
9656 event.byterange = {};
9657 if (length) {
9658 event.byterange.length = parseInt(length, 10);
9659 }
9660 if (offset) {
9661 event.byterange.offset = parseInt(offset, 10);
9662 }
9663 }
9664 }
9665
9666 this.trigger('data', event);
9667 return;
9668 }
9669 match = /^#EXT-X-STREAM-INF:?(.*)$/.exec(line);
9670 if (match) {
9671 event = {
9672 type: 'tag',
9673 tagType: 'stream-inf'
9674 };
9675 if (match[1]) {
9676 event.attributes = parseAttributes(match[1]);
9677
9678 if (event.attributes.RESOLUTION) {
9679 var split = event.attributes.RESOLUTION.split('x');
9680 var resolution = {};
9681
9682 if (split[0]) {
9683 resolution.width = parseInt(split[0], 10);
9684 }
9685 if (split[1]) {
9686 resolution.height = parseInt(split[1], 10);
9687 }
9688 event.attributes.RESOLUTION = resolution;
9689 }
9690 if (event.attributes.BANDWIDTH) {
9691 event.attributes.BANDWIDTH = parseInt(event.attributes.BANDWIDTH, 10);
9692 }
9693 if (event.attributes['PROGRAM-ID']) {
9694 event.attributes['PROGRAM-ID'] = parseInt(event.attributes['PROGRAM-ID'], 10);
9695 }
9696 }
9697 this.trigger('data', event);
9698 return;
9699 }
9700 match = /^#EXT-X-MEDIA:?(.*)$/.exec(line);
9701 if (match) {
9702 event = {
9703 type: 'tag',
9704 tagType: 'media'
9705 };
9706 if (match[1]) {
9707 event.attributes = parseAttributes(match[1]);
9708 }
9709 this.trigger('data', event);
9710 return;
9711 }
9712 match = /^#EXT-X-ENDLIST/.exec(line);
9713 if (match) {
9714 this.trigger('data', {
9715 type: 'tag',
9716 tagType: 'endlist'
9717 });
9718 return;
9719 }
9720 match = /^#EXT-X-DISCONTINUITY/.exec(line);
9721 if (match) {
9722 this.trigger('data', {
9723 type: 'tag',
9724 tagType: 'discontinuity'
9725 });
9726 return;
9727 }
9728 match = /^#EXT-X-PROGRAM-DATE-TIME:?(.*)$/.exec(line);
9729 if (match) {
9730 event = {
9731 type: 'tag',
9732 tagType: 'program-date-time'
9733 };
9734 if (match[1]) {
9735 event.dateTimeString = match[1];
9736 event.dateTimeObject = new Date(match[1]);
9737 }
9738 this.trigger('data', event);
9739 return;
9740 }
9741 match = /^#EXT-X-KEY:?(.*)$/.exec(line);
9742 if (match) {
9743 event = {
9744 type: 'tag',
9745 tagType: 'key'
9746 };
9747 if (match[1]) {
9748 event.attributes = parseAttributes(match[1]);
9749 // parse the IV string into a Uint32Array
9750 if (event.attributes.IV) {
9751 if (event.attributes.IV.substring(0, 2).toLowerCase() === '0x') {
9752 event.attributes.IV = event.attributes.IV.substring(2);
9753 }
9754
9755 event.attributes.IV = event.attributes.IV.match(/.{8}/g);
9756 event.attributes.IV[0] = parseInt(event.attributes.IV[0], 16);
9757 event.attributes.IV[1] = parseInt(event.attributes.IV[1], 16);
9758 event.attributes.IV[2] = parseInt(event.attributes.IV[2], 16);
9759 event.attributes.IV[3] = parseInt(event.attributes.IV[3], 16);
9760 event.attributes.IV = new Uint32Array(event.attributes.IV);
9761 }
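 // worked example of the conversion above: an IV of
 // "0x00000000000000000000000000000001" is stripped of its "0x" prefix,
 // split into ["00000000", "00000000", "00000000", "00000001"], and
 // parsed into Uint32Array [0, 0, 0, 1]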
9762 }
9763 this.trigger('data', event);
9764 return;
9765 }
9766 match = /^#EXT-X-CUE-OUT-CONT:?(.*)?$/.exec(line);
9767 if (match) {
9768 event = {
9769 type: 'tag',
9770 tagType: 'cue-out-cont'
9771 };
9772 if (match[1]) {
9773 event.data = match[1];
9774 } else {
9775 event.data = '';
9776 }
9777 this.trigger('data', event);
9778 return;
9779 }
9780 match = /^#EXT-X-CUE-OUT:?(.*)?$/.exec(line);
9781 if (match) {
9782 event = {
9783 type: 'tag',
9784 tagType: 'cue-out'
9785 };
9786 if (match[1]) {
9787 event.data = match[1];
9788 } else {
9789 event.data = '';
9790 }
9791 this.trigger('data', event);
9792 return;
9793 }
9794 match = /^#EXT-X-CUE-IN:?(.*)?$/.exec(line);
9795 if (match) {
9796 event = {
9797 type: 'tag',
9798 tagType: 'cue-in'
9799 };
9800 if (match[1]) {
9801 event.data = match[1];
9802 } else {
9803 event.data = '';
9804 }
9805 this.trigger('data', event);
9806 return;
9807 }
9808
9809 // unknown tag type
9810 this.trigger('data', {
9811 type: 'tag',
9812 data: line.slice(4)
9813 });
9814 }
9815 }]);
9816
9817 return ParseStream;
9818}(_stream2['default']);
9819
9820exports['default'] = ParseStream;
9821},{"./stream":37}],36:[function(require,module,exports){
9822'use strict';
9823
9824Object.defineProperty(exports, "__esModule", {
9825 value: true
9826});
9827
9828var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; };
9829
9830var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
9831
9832var _stream = require('./stream');
9833
9834var _stream2 = _interopRequireDefault(_stream);
9835
9836var _lineStream = require('./line-stream');
9837
9838var _lineStream2 = _interopRequireDefault(_lineStream);
9839
9840var _parseStream = require('./parse-stream');
9841
9842var _parseStream2 = _interopRequireDefault(_parseStream);
9843
9844function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
9845
9846function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
9847
9848function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
9849
9850function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /**
9851 * @file m3u8/parser.js
9852 */
9853
9854
9855/**
9856 * A parser for M3U8 files. The current interpretation of the input is
9857 * exposed as a property `manifest` on parser objects. It's just two lines to
9858 * create and parse a manifest once you have the contents available as a string:
9859 *
9860 * ```js
9861 * var parser = new m3u8.Parser();
9862 * parser.push(xhr.responseText);
9863 * ```
9864 *
9865 * New input can later be applied to update the manifest object by calling
9866 * `push` again.
9867 *
9868 * The parser attempts to create a usable manifest object even if the
9869 * underlying input is somewhat nonsensical. It emits `info` and `warning`
9870 * events during the parse if it encounters input that seems invalid or
9871 * requires some property of the manifest object to be defaulted.
9872 *
9873 * @class Parser
9874 * @extends Stream
9875 */
9876var Parser = function (_Stream) {
9877 _inherits(Parser, _Stream);
9878
9879 function Parser() {
9880 _classCallCheck(this, Parser);
9881
9882 var _this = _possibleConstructorReturn(this, (Parser.__proto__ || Object.getPrototypeOf(Parser)).call(this));
9883
9884 _this.lineStream = new _lineStream2['default']();
9885 _this.parseStream = new _parseStream2['default']();
9886 _this.lineStream.pipe(_this.parseStream);
9887 /* eslint-disable consistent-this */
9888 var self = _this;
9889 /* eslint-enable consistent-this */
9890 var uris = [];
9891 var currentUri = {};
9892 // if specified, the active EXT-X-MAP definition
9893 var currentMap = void 0;
9894 // if specified, the active decryption key
9895 var _key = void 0;
9896 var noop = function noop() {};
9897 var defaultMediaGroups = {
9898 'AUDIO': {},
9899 'VIDEO': {},
9900 'CLOSED-CAPTIONS': {},
9901 'SUBTITLES': {}
9902 };
9903 // group segments into numbered timelines delineated by discontinuities
9904 var currentTimeline = 0;
9905
9906 // the manifest is empty until the parse stream begins delivering data
9907 _this.manifest = {
9908 allowCache: true,
9909 discontinuityStarts: [],
9910 segments: []
9911 };
9912
9913 // update the manifest with the m3u8 entry from the parse stream
9914 _this.parseStream.on('data', function (entry) {
9915 var mediaGroup = void 0;
9916 var rendition = void 0;
9917
9918 ({
9919 tag: function tag() {
9920 // switch based on the tag type
9921 (({
9922 'allow-cache': function allowCache() {
9923 this.manifest.allowCache = entry.allowed;
9924 if (!('allowed' in entry)) {
9925 this.trigger('info', {
9926 message: 'defaulting allowCache to YES'
9927 });
9928 this.manifest.allowCache = true;
9929 }
9930 },
9931 byterange: function byterange() {
9932 var byterange = {};
9933
9934 if ('length' in entry) {
9935 currentUri.byterange = byterange;
9936 byterange.length = entry.length;
9937
9938 if (!('offset' in entry)) {
9939 this.trigger('info', {
9940 message: 'defaulting offset to zero'
9941 });
9942 entry.offset = 0;
9943 }
9944 }
9945 if ('offset' in entry) {
9946 currentUri.byterange = byterange;
9947 byterange.offset = entry.offset;
9948 }
9949 },
9950 endlist: function endlist() {
9951 this.manifest.endList = true;
9952 },
9953 inf: function inf() {
9954 if (!('mediaSequence' in this.manifest)) {
9955 this.manifest.mediaSequence = 0;
9956 this.trigger('info', {
9957 message: 'defaulting media sequence to zero'
9958 });
9959 }
9960 if (!('discontinuitySequence' in this.manifest)) {
9961 this.manifest.discontinuitySequence = 0;
9962 this.trigger('info', {
9963 message: 'defaulting discontinuity sequence to zero'
9964 });
9965 }
9966 if (entry.duration > 0) {
9967 currentUri.duration = entry.duration;
9968 }
9969
9970 if (entry.duration === 0) {
9971 currentUri.duration = 0.01;
9972 this.trigger('info', {
9973 message: 'updating zero segment duration to a small value'
9974 });
9975 }
9976
9977 this.manifest.segments = uris;
9978 },
9979 key: function key() {
9980 if (!entry.attributes) {
9981 this.trigger('warn', {
9982 message: 'ignoring key declaration without attribute list'
9983 });
9984 return;
9985 }
9986 // clear the active encryption key
9987 if (entry.attributes.METHOD === 'NONE') {
9988 _key = null;
9989 return;
9990 }
9991 if (!entry.attributes.URI) {
9992 this.trigger('warn', {
9993 message: 'ignoring key declaration without URI'
9994 });
9995 return;
9996 }
9997 if (!entry.attributes.METHOD) {
9998 this.trigger('warn', {
9999 message: 'defaulting key method to AES-128'
10000 });
10001 }
10002
10003 // setup an encryption key for upcoming segments
10004 _key = {
10005 method: entry.attributes.METHOD || 'AES-128',
10006 uri: entry.attributes.URI
10007 };
10008
10009 if (typeof entry.attributes.IV !== 'undefined') {
10010 _key.iv = entry.attributes.IV;
10011 }
10012 },
10013 'media-sequence': function mediaSequence() {
10014 if (!isFinite(entry.number)) {
10015 this.trigger('warn', {
10016 message: 'ignoring invalid media sequence: ' + entry.number
10017 });
10018 return;
10019 }
10020 this.manifest.mediaSequence = entry.number;
10021 },
10022 'discontinuity-sequence': function discontinuitySequence() {
10023 if (!isFinite(entry.number)) {
10024 this.trigger('warn', {
10025 message: 'ignoring invalid discontinuity sequence: ' + entry.number
10026 });
10027 return;
10028 }
10029 this.manifest.discontinuitySequence = entry.number;
10030 currentTimeline = entry.number;
10031 },
10032 'playlist-type': function playlistType() {
10033 if (!/VOD|EVENT/.test(entry.playlistType)) {
10034 this.trigger('warn', {
10035 message: 'ignoring unknown playlist type: ' + entry.playlistType
10036 });
10037 return;
10038 }
10039 this.manifest.playlistType = entry.playlistType;
10040 },
10041 map: function map() {
10042 currentMap = {};
10043 if (entry.uri) {
10044 currentMap.uri = entry.uri;
10045 }
10046 if (entry.byterange) {
10047 currentMap.byterange = entry.byterange;
10048 }
10049 },
10050 'stream-inf': function streamInf() {
10051 this.manifest.playlists = uris;
10052 this.manifest.mediaGroups = this.manifest.mediaGroups || defaultMediaGroups;
10053
10054 if (!entry.attributes) {
10055 this.trigger('warn', {
10056 message: 'ignoring empty stream-inf attributes'
10057 });
10058 return;
10059 }
10060
10061 if (!currentUri.attributes) {
10062 currentUri.attributes = {};
10063 }
10064 _extends(currentUri.attributes, entry.attributes);
10065 },
10066 media: function media() {
10067 this.manifest.mediaGroups = this.manifest.mediaGroups || defaultMediaGroups;
10068
10069 if (!(entry.attributes && entry.attributes.TYPE && entry.attributes['GROUP-ID'] && entry.attributes.NAME)) {
10070 this.trigger('warn', {
10071 message: 'ignoring incomplete or missing media group'
10072 });
10073 return;
10074 }
10075
10076 // find the media group, creating defaults as necessary
10077 var mediaGroupType = this.manifest.mediaGroups[entry.attributes.TYPE];
10078
10079 mediaGroupType[entry.attributes['GROUP-ID']] = mediaGroupType[entry.attributes['GROUP-ID']] || {};
10080 mediaGroup = mediaGroupType[entry.attributes['GROUP-ID']];
10081
10082 // collect the rendition metadata
10083 rendition = {
10084 'default': /yes/i.test(entry.attributes.DEFAULT)
10085 };
10086 if (rendition['default']) {
10087 rendition.autoselect = true;
10088 } else {
10089 rendition.autoselect = /yes/i.test(entry.attributes.AUTOSELECT);
10090 }
10091 if (entry.attributes.LANGUAGE) {
10092 rendition.language = entry.attributes.LANGUAGE;
10093 }
10094 if (entry.attributes.URI) {
10095 rendition.uri = entry.attributes.URI;
10096 }
10097 if (entry.attributes['INSTREAM-ID']) {
10098 rendition.instreamId = entry.attributes['INSTREAM-ID'];
10099 }
10100 if (entry.attributes.CHARACTERISTICS) {
10101 rendition.characteristics = entry.attributes.CHARACTERISTICS;
10102 }
10103 if (entry.attributes.FORCED) {
10104 rendition.forced = /yes/i.test(entry.attributes.FORCED);
10105 }
10106
10107 // insert the new rendition
10108 mediaGroup[entry.attributes.NAME] = rendition;
10109 },
10110 discontinuity: function discontinuity() {
10111 currentTimeline += 1;
10112 currentUri.discontinuity = true;
10113 this.manifest.discontinuityStarts.push(uris.length);
10114 },
10115 'program-date-time': function programDateTime() {
10116 this.manifest.dateTimeString = entry.dateTimeString;
10117 this.manifest.dateTimeObject = entry.dateTimeObject;
10118 },
10119 targetduration: function targetduration() {
10120 if (!isFinite(entry.duration) || entry.duration < 0) {
10121 this.trigger('warn', {
10122 message: 'ignoring invalid target duration: ' + entry.duration
10123 });
10124 return;
10125 }
10126 this.manifest.targetDuration = entry.duration;
10127 },
10128 totalduration: function totalduration() {
10129 if (!isFinite(entry.duration) || entry.duration < 0) {
10130 this.trigger('warn', {
10131 message: 'ignoring invalid total duration: ' + entry.duration
10132 });
10133 return;
10134 }
10135 this.manifest.totalDuration = entry.duration;
10136 },
10137 'cue-out': function cueOut() {
10138 currentUri.cueOut = entry.data;
10139 },
10140 'cue-out-cont': function cueOutCont() {
10141 currentUri.cueOutCont = entry.data;
10142 },
10143 'cue-in': function cueIn() {
10144 currentUri.cueIn = entry.data;
10145 }
10146 })[entry.tagType] || noop).call(self);
10147 },
10148 uri: function uri() {
10149 currentUri.uri = entry.uri;
10150 uris.push(currentUri);
10151
10152 // if no explicit duration was declared, use the target duration
10153 if (this.manifest.targetDuration && !('duration' in currentUri)) {
10154 this.trigger('warn', {
10155 message: 'defaulting segment duration to the target duration'
10156 });
10157 currentUri.duration = this.manifest.targetDuration;
10158 }
10159 // annotate with encryption information, if necessary
10160 if (_key) {
10161 currentUri.key = _key;
10162 }
10163 currentUri.timeline = currentTimeline;
10164 // annotate with initialization segment information, if necessary
10165 if (currentMap) {
10166 currentUri.map = currentMap;
10167 }
10168
10169 // prepare for the next URI
10170 currentUri = {};
10171 },
10172 comment: function comment() {
10173 // comments are not important for playback
10174 }
10175 })[entry.type].call(self);
10176 });
10177
10178 return _this;
10179 }
10180
10181 /**
10182 * Parse the input string and update the manifest object.
10183 *
10184 * @param {String} chunk a potentially incomplete portion of the manifest
10185 */
10186
10187
10188 _createClass(Parser, [{
10189 key: 'push',
10190 value: function push(chunk) {
10191 this.lineStream.push(chunk);
10192 }
10193
10194 /**
10195 * Flush any remaining input. This can be handy if the last line of an M3U8
10196 * manifest did not contain a trailing newline but the file has been
10197 * completely received.
10198 */
10199
10200 }, {
10201 key: 'end',
10202 value: function end() {
10203 // flush any buffered input
10204 this.lineStream.push('\n');
10205 }
10206 }]);
10207
10208 return Parser;
10209}(_stream2['default']);
10210
10211exports['default'] = Parser;
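/**
 * A slightly fuller sketch of typical use (illustrative only; the manifest
 * text is made up): push the playlist text, flush it with `end`, and read
 * the accumulated `manifest` object.
 *
 * ```js
 * var parser = new Parser();
 *
 * parser.on('warn', function(event) {
 *   console.warn(event.message);
 * });
 * parser.push('#EXTM3U\n#EXT-X-TARGETDURATION:10\n#EXTINF:10,\nsegment0.ts');
 * // the last line has no trailing newline, so flush the buffered input
 * parser.end();
 *
 * parser.manifest.targetDuration; // 10
 * parser.manifest.segments[0].uri; // 'segment0.ts'
 * ```
 */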
10212},{"./line-stream":34,"./parse-stream":35,"./stream":37}],37:[function(require,module,exports){
10213'use strict';
10214
10215Object.defineProperty(exports, "__esModule", {
10216 value: true
10217});
10218
10219var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
10220
10221function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
10222
10223/**
10224 * @file stream.js
10225 */
10226/**
10227 * A lightweight readable stream implementation that handles event dispatching.
10228 *
10229 * @class Stream
10230 */
10231var Stream = function () {
10232 function Stream() {
10233 _classCallCheck(this, Stream);
10234
10235 this.listeners = {};
10236 }
10237
10238 /**
10239 * Add a listener for a specified event type.
10240 *
10241 * @param {String} type the event name
10242 * @param {Function} listener the callback to be invoked when an event of
10243 * the specified type occurs
10244 */
10245
10246
10247 _createClass(Stream, [{
10248 key: 'on',
10249 value: function on(type, listener) {
10250 if (!this.listeners[type]) {
10251 this.listeners[type] = [];
10252 }
10253 this.listeners[type].push(listener);
10254 }
10255
10256 /**
10257 * Remove a listener for a specified event type.
10258 *
10259 * @param {String} type the event name
10260 * @param {Function} listener a function previously registered for this
10261 * type of event through `on`
10262 * @return {Boolean} if we could turn it off or not
10263 */
10264
10265 }, {
10266 key: 'off',
10267 value: function off(type, listener) {
10268 if (!this.listeners[type]) {
10269 return false;
10270 }
10271
10272 var index = this.listeners[type].indexOf(listener);
10273
10274 if (index > -1) { this.listeners[type].splice(index, 1); } // avoid splice(-1, 1) removing the last listener when the target was never registered
10275 return index > -1;
10276 }
10277
10278 /**
10279 * Trigger an event of the specified type on this stream. Any additional
10280 * arguments to this function are passed as parameters to event listeners.
10281 *
10282 * @param {String} type the event name
10283 */
10284
10285 }, {
10286 key: 'trigger',
10287 value: function trigger(type) {
10288 var callbacks = this.listeners[type];
10289 var i = void 0;
10290 var length = void 0;
10291 var args = void 0;
10292
10293 if (!callbacks) {
10294 return;
10295 }
10296 // Slicing the arguments on every invocation of this method
10297 // can add a significant amount of overhead. Avoid the
10298 // intermediate object creation for the common case of a
10299 // single callback argument
10300 if (arguments.length === 2) {
10301 length = callbacks.length;
10302 for (i = 0; i < length; ++i) {
10303 callbacks[i].call(this, arguments[1]);
10304 }
10305 } else {
10306 args = Array.prototype.slice.call(arguments, 1);
10307 length = callbacks.length;
10308 for (i = 0; i < length; ++i) {
10309 callbacks[i].apply(this, args);
10310 }
10311 }
10312 }
10313
10314 /**
10315 * Destroys the stream and cleans up.
10316 */
10317
10318 }, {
10319 key: 'dispose',
10320 value: function dispose() {
10321 this.listeners = {};
10322 }
10323 /**
10324 * Forwards all `data` events on this stream to the destination stream. The
10325 * destination stream should provide a method `push` to receive the data
10326 * events as they arrive.
10327 *
10328 * @param {Stream} destination the stream that will receive all `data` events
10329 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
10330 */
10331
10332 }, {
10333 key: 'pipe',
10334 value: function pipe(destination) {
10335 this.on('data', function (data) {
10336 destination.push(data);
10337 });
10338 }
10339 }]);
10340
10341 return Stream;
10342}();
10343
10344exports['default'] = Stream;
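/**
 * A minimal sketch of the event surface (illustrative only): listeners are
 * registered with `on`, fired with `trigger`, and `pipe` forwards `data`
 * events to any object exposing a `push` method.
 *
 * ```js
 * var source = new Stream();
 * var received = [];
 *
 * source.pipe({ push: function(data) { received.push(data); } });
 * source.trigger('data', 'hello'); // received is now ['hello']
 * ```
 */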
10345},{}],38:[function(require,module,exports){
10346/**
10347 * mux.js
10348 *
10349 * Copyright (c) 2016 Brightcove
10350 * All rights reserved.
10351 *
10352 * A stream-based AAC to MP4 converter. This utility can be used to
10353 * deliver mp4s to a SourceBuffer on platforms that support native
10354 * Media Source Extensions.
10355 */
10356'use strict';
10357var Stream = require('../utils/stream.js');
10358
10359// Constants
10360var AacStream;
10361
10362/**
10363 * Splits an incoming stream of binary data into ADTS and ID3 Frames.
10364 */
10365
10366AacStream = function() {
10367 var
10368 everything = new Uint8Array(),
10369 timeStamp = 0;
10370
10371 AacStream.prototype.init.call(this);
10372
10373 this.setTimestamp = function(timestamp) {
10374 timeStamp = timestamp;
10375 };
10376
10377 this.parseId3TagSize = function(header, byteIndex) {
10378 var
10379 returnSize = (header[byteIndex + 6] << 21) |
10380 (header[byteIndex + 7] << 14) |
10381 (header[byteIndex + 8] << 7) |
10382 (header[byteIndex + 9]),
10383 flags = header[byteIndex + 5],
10384 footerPresent = (flags & 16) >> 4;
10385
10386 if (footerPresent) {
10387 return returnSize + 20;
10388 }
10389 return returnSize + 10;
10390 };
10391
10392 this.parseAdtsSize = function(header, byteIndex) {
10393 var
10394 lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
10395 middle = header[byteIndex + 4] << 3,
10396 highTwo = (header[byteIndex + 3] & 0x3) << 11; // parens required: << binds tighter than &
10397
10398 return (highTwo | middle) | lowThree;
10399 };
10400
10401 this.push = function(bytes) {
10402 var
10403 frameSize = 0,
10404 byteIndex = 0,
10405 bytesLeft,
10406 chunk,
10407 packet,
10408 tempLength;
10409
10410 // If there are bytes remaining from the last segment, prepend them to the
10411 // bytes that were pushed in
10412 if (everything.length) {
10413 tempLength = everything.length;
 // hold a reference to the old buffer; reading `everything` after it has
 // been reallocated below would copy zeros and drop the buffered bytes
 chunk = everything;
10414 everything = new Uint8Array(bytes.byteLength + tempLength);
10415 everything.set(chunk.subarray(0, tempLength));
10416 everything.set(bytes, tempLength);
10417 } else {
10418 everything = bytes;
10419 }
10420
10421 while (everything.length - byteIndex >= 3) {
10422 if ((everything[byteIndex] === 'I'.charCodeAt(0)) &&
10423 (everything[byteIndex + 1] === 'D'.charCodeAt(0)) &&
10424 (everything[byteIndex + 2] === '3'.charCodeAt(0))) {
10425
10426 // Exit early because we don't have enough to parse
10427 // the ID3 tag header
10428 if (everything.length - byteIndex < 10) {
10429 break;
10430 }
10431
10432 // check framesize
10433 frameSize = this.parseId3TagSize(everything, byteIndex);
10434
10435 // Exit early if we don't have enough in the buffer
10436 // to emit a full packet
10437 if (everything.length - byteIndex < frameSize) {
10438 break;
10439 }
10440 chunk = {
10441 type: 'timed-metadata',
10442 data: everything.subarray(byteIndex, byteIndex + frameSize)
10443 };
10444 this.trigger('data', chunk);
10445 byteIndex += frameSize;
10446 continue;
10447 } else if (((everything[byteIndex] & 0xff) === 0xff) && // parens required: === binds tighter than &
10448 ((everything[byteIndex + 1] & 0xf0) === 0xf0)) {
10449
10450 // Exit early because we don't have enough to parse
10451 // the ADTS frame header
10452 if (everything.length - byteIndex < 7) {
10453 break;
10454 }
10455
10456 frameSize = this.parseAdtsSize(everything, byteIndex);
10457
10458 // Exit early if we don't have enough in the buffer
10459 // to emit a full packet
10460 if (everything.length - byteIndex < frameSize) {
10461 break;
10462 }
10463
10464 packet = {
10465 type: 'audio',
10466 data: everything.subarray(byteIndex, byteIndex + frameSize),
10467 pts: timeStamp,
10468 dts: timeStamp
10469 };
10470 this.trigger('data', packet);
10471 byteIndex += frameSize;
10472 continue;
10473 }
10474 byteIndex++;
10475 }
10476 bytesLeft = everything.length - byteIndex;
10477
10478 if (bytesLeft > 0) {
10479 everything = everything.subarray(byteIndex);
10480 } else {
10481 everything = new Uint8Array();
10482 }
10483 };
10484};
10485
10486AacStream.prototype = new Stream();
10487
10488module.exports = AacStream;
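/**
 * A minimal usage sketch (illustrative only; `segmentBytes` stands in for a
 * Uint8Array of raw AAC data): the stream emits 'timed-metadata' chunks for
 * ID3 tags and 'audio' chunks for complete ADTS frames.
 *
 * ```js
 * var aacStream = new AacStream();
 *
 * aacStream.on('data', function(chunk) {
 *   console.log(chunk.type, chunk.data.byteLength);
 * });
 * aacStream.setTimestamp(90000); // pts/dts attached to emitted audio packets
 * aacStream.push(segmentBytes);
 * ```
 */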
10489
10490},{"../utils/stream.js":62}],39:[function(require,module,exports){
10491/**
10492 * mux.js
10493 *
10494 * Copyright (c) 2016 Brightcove
10495 * All rights reserved.
10496 *
10497 * Utilities to detect basic properties and metadata about AAC data.
10498 */
10499'use strict';
10500
10501var ADTS_SAMPLING_FREQUENCIES = [
10502 96000,
10503 88200,
10504 64000,
10505 48000,
10506 44100,
10507 32000,
10508 24000,
10509 22050,
10510 16000,
10511 12000,
10512 11025,
10513 8000,
10514 7350
10515];
10516
10517var parseSyncSafeInteger = function(data) {
10518 return (data[0] << 21) |
10519 (data[1] << 14) |
10520 (data[2] << 7) |
10521 (data[3]);
10522};
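// worked example: the syncsafe bytes [0x00, 0x00, 0x02, 0x01] decode to
// (0x02 << 7) | 0x01 === 257; each byte contributes only 7 bits, so the
// 0xFF values used for MPEG sync can never appear inside a tag size field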
10523
10524// return a percent-encoded representation of the specified byte range
10525// @see http://en.wikipedia.org/wiki/Percent-encoding
10526var percentEncode = function(bytes, start, end) {
10527 var i, result = '';
10528 for (i = start; i < end; i++) {
10529 result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
10530 }
10531 return result;
10532};
10533
10534// return the string representation of the specified byte range,
10535// interpreted as ISO-8859-1.
10536var parseIso88591 = function(bytes, start, end) {
10537 return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
10538};
10539
10540var parseId3TagSize = function(header, byteIndex) {
10541 var
10542 returnSize = (header[byteIndex + 6] << 21) |
10543 (header[byteIndex + 7] << 14) |
10544 (header[byteIndex + 8] << 7) |
10545 (header[byteIndex + 9]),
10546 flags = header[byteIndex + 5],
10547 footerPresent = (flags & 16) >> 4;
10548
10549 if (footerPresent) {
10550 return returnSize + 20;
10551 }
10552 return returnSize + 10;
10553};
10554
10555var parseAdtsSize = function(header, byteIndex) {
10556 var
10557 lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
10558 middle = header[byteIndex + 4] << 3,
10559 highTwo = (header[byteIndex + 3] & 0x3) << 11; // parens required: << binds tighter than &
10560
10561 return (highTwo | middle) | lowThree;
10562};
10563
10564var parseType = function(header, byteIndex) {
10565 if ((header[byteIndex] === 'I'.charCodeAt(0)) &&
10566 (header[byteIndex + 1] === 'D'.charCodeAt(0)) &&
10567 (header[byteIndex + 2] === '3'.charCodeAt(0))) {
10568 return 'timed-metadata';
10569 } else if (((header[byteIndex] & 0xff) === 0xff) && // parens required: === binds tighter than &
10570 ((header[byteIndex + 1] & 0xf0) === 0xf0)) {
10571 return 'audio';
10572 }
10573 return null;
10574};
10575
10576var parseSampleRate = function(packet) {
10577 var i = 0;
10578
10579 while (i + 5 < packet.length) {
10580 if (packet[i] !== 0xFF || (packet[i + 1] & 0xF6) !== 0xF0) {
10581 // If a valid header was not found, jump one forward and attempt to
10582 // find a valid ADTS header starting at the next byte
10583 i++;
10584 continue;
10585 }
10586 return ADTS_SAMPLING_FREQUENCIES[(packet[i + 2] & 0x3c) >>> 2];
10587 }
10588
10589 return null;
10590};
10591
10592var parseAacTimestamp = function(packet) {
10593 var frameStart, frameSize, frame, frameHeader;
10594
10595 // find the start of the first frame and the end of the tag
10596 frameStart = 10;
10597 if (packet[5] & 0x40) {
10598 // advance the frame start past the extended header
10599 frameStart += 4; // header size field
10600 frameStart += parseSyncSafeInteger(packet.subarray(10, 14));
10601 }
10602
10603 // parse one or more ID3 frames
10604 // http://id3.org/id3v2.3.0#ID3v2_frame_overview
10605 do {
10606 // determine the number of bytes in this frame
10607 frameSize = parseSyncSafeInteger(packet.subarray(frameStart + 4, frameStart + 8));
10608 if (frameSize < 1) {
10609 return null;
10610 }
10611 frameHeader = String.fromCharCode(packet[frameStart],
10612 packet[frameStart + 1],
10613 packet[frameStart + 2],
10614 packet[frameStart + 3]);
10615
10616 if (frameHeader === 'PRIV') {
10617 frame = packet.subarray(frameStart + 10, frameStart + frameSize + 10);
10618
10619 for (var i = 0; i < frame.byteLength; i++) {
10620 if (frame[i] === 0) {
10621 var owner = parseIso88591(frame, 0, i);
10622 if (owner === 'com.apple.streaming.transportStreamTimestamp') {
10623 var d = frame.subarray(i + 1);
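 // the PES timestamp is 33 bits wide; JavaScript bitwise operators
 // truncate to 32 bits, so assemble the top 31 bits with shifts, then
 // scale by 4 and re-attach the low 2 bits with ordinary arithmetic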
10624 var size = ((d[3] & 0x01) << 30) |
10625 (d[4] << 22) |
10626 (d[5] << 14) |
10627 (d[6] << 6) |
10628 (d[7] >>> 2);
10629 size *= 4;
10630 size += d[7] & 0x03;
10631
10632 return size;
10633 }
10634 break;
10635 }
10636 }
10637 }
10638
10639 frameStart += 10; // advance past the frame header
10640 frameStart += frameSize; // advance past the frame body
10641 } while (frameStart < packet.byteLength);
10642 return null;
10643};
10644
10645module.exports = {
10646 parseId3TagSize: parseId3TagSize,
10647 parseAdtsSize: parseAdtsSize,
10648 parseType: parseType,
10649 parseSampleRate: parseSampleRate,
10650 parseAacTimestamp: parseAacTimestamp
10651};
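// illustrative sketch of combining these helpers to probe a raw AAC segment
// (`bytes` stands in for a Uint8Array; real callers advance an index past
// each tag or frame, as AacStream does):
//
//   if (parseType(bytes, 0) === 'timed-metadata') {
//     var tagSize = parseId3TagSize(bytes, 0);
//     var baseTimestamp = parseAacTimestamp(bytes.subarray(0, tagSize));
//   } else if (parseType(bytes, 0) === 'audio') {
//     var sampleRate = parseSampleRate(bytes);
//   }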
10652
10653},{}],40:[function(require,module,exports){
10654'use strict';
10655
10656var Stream = require('../utils/stream.js');
10657
10658var AdtsStream;
10659
10660var
10661 ADTS_SAMPLING_FREQUENCIES = [
10662 96000,
10663 88200,
10664 64000,
10665 48000,
10666 44100,
10667 32000,
10668 24000,
10669 22050,
10670 16000,
10671 12000,
10672 11025,
10673 8000,
10674 7350
10675 ];
10676
10677/*
10678 * Accepts an ElementaryStream and emits data events with parsed
10679 * AAC Audio Frames of the individual packets. Input audio in ADTS
10680 * format is unpacked and re-emitted as AAC frames.
10681 *
10682 * @see http://wiki.multimedia.cx/index.php?title=ADTS
10683 * @see http://wiki.multimedia.cx/?title=Understanding_AAC
10684 */
10685AdtsStream = function() {
10686 var buffer;
10687
10688 AdtsStream.prototype.init.call(this);
10689
10690 this.push = function(packet) {
10691 var
10692 i = 0,
10693 frameNum = 0,
10694 frameLength,
10695 protectionSkipBytes,
10696 frameEnd,
10697 oldBuffer,
10698 sampleCount,
10699 adtsFrameDuration;
10700
10701 if (packet.type !== 'audio') {
10702 // ignore non-audio data
10703 return;
10704 }
10705
10706 // Prepend any data in the buffer to the input data so that we can parse
10707 // AAC frames that cross a PES packet boundary
10708 if (buffer) {
10709 oldBuffer = buffer;
10710 buffer = new Uint8Array(oldBuffer.byteLength + packet.data.byteLength);
10711 buffer.set(oldBuffer);
10712 buffer.set(packet.data, oldBuffer.byteLength);
10713 } else {
10714 buffer = packet.data;
10715 }
10716
10717 // unpack any ADTS frames which have been fully received
10718 // for details on the ADTS header, see http://wiki.multimedia.cx/index.php?title=ADTS
10719 while (i + 5 < buffer.length) {
10720
10721 // Look for the start of an ADTS header.
10722 if (buffer[i] !== 0xFF || (buffer[i + 1] & 0xF6) !== 0xF0) {
10723 // If a valid header was not found, jump one forward and attempt to
10724 // find a valid ADTS header starting at the next byte
10725 i++;
10726 continue;
10727 }
10728
10729 // The protection skip bit tells us if we have 2 bytes of CRC data at the
10730 // end of the ADTS header
10731 protectionSkipBytes = (~buffer[i + 1] & 0x01) * 2;
10732
10733 // Frame length is a 13 bit integer starting 16 bits from the
10734 // end of the sync sequence
10735 frameLength = ((buffer[i + 3] & 0x03) << 11) |
10736 (buffer[i + 4] << 3) |
10737 ((buffer[i + 5] & 0xe0) >> 5);
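 // worked example: header bytes 0x01, 0x80, 0x20 at offsets i+3..i+5
 // give (0x01 << 11) | (0x80 << 3) | 0x01 === 3073 bytes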
10738
10739 sampleCount = ((buffer[i + 6] & 0x03) + 1) * 1024;
10740 adtsFrameDuration = (sampleCount * 90000) /
10741 ADTS_SAMPLING_FREQUENCIES[(buffer[i + 2] & 0x3c) >>> 2];
10742
10743 frameEnd = i + frameLength;
10744
10745 // If we don't have enough data to actually finish this ADTS frame, return
10746 // and wait for more data
10747 if (buffer.byteLength < frameEnd) {
10748 return;
10749 }
10750
10751 // Otherwise, deliver the complete AAC frame
10752 this.trigger('data', {
10753 pts: packet.pts + (frameNum * adtsFrameDuration),
10754 dts: packet.dts + (frameNum * adtsFrameDuration),
10755 sampleCount: sampleCount,
10756 audioobjecttype: ((buffer[i + 2] >>> 6) & 0x03) + 1,
10757 channelcount: ((buffer[i + 2] & 1) << 2) |
10758 ((buffer[i + 3] & 0xc0) >>> 6),
10759 samplerate: ADTS_SAMPLING_FREQUENCIES[(buffer[i + 2] & 0x3c) >>> 2],
10760 samplingfrequencyindex: (buffer[i + 2] & 0x3c) >>> 2,
10761 // assume ISO/IEC 14496-12 AudioSampleEntry default of 16
10762 samplesize: 16,
10763 data: buffer.subarray(i + 7 + protectionSkipBytes, frameEnd)
10764 });
10765
10766 // If the buffer is empty, clear it and return
10767 if (buffer.byteLength === frameEnd) {
10768 buffer = undefined;
10769 return;
10770 }
10771
10772 frameNum++;
10773
10774 // Remove the finished frame from the buffer and start the process again
10775 buffer = buffer.subarray(frameEnd);
10776 }
10777 };
10778 this.flush = function() {
10779 this.trigger('done');
10780 };
10781};
10782
10783AdtsStream.prototype = new Stream();
10784
10785module.exports = AdtsStream;
10786
10787},{"../utils/stream.js":62}],41:[function(require,module,exports){
10788'use strict';
10789
10790var Stream = require('../utils/stream.js');
10791var ExpGolomb = require('../utils/exp-golomb.js');
10792
10793var H264Stream, NalByteStream;
10794var PROFILES_WITH_OPTIONAL_SPS_DATA;
10795
10796/**
10797 * Accepts a NAL unit byte stream and unpacks the embedded NAL units.
10798 */
10799NalByteStream = function() {
10800 var
10801 syncPoint = 0,
10802 i,
10803 buffer;
10804 NalByteStream.prototype.init.call(this);
10805
10806 this.push = function(data) {
10807 var swapBuffer;
10808
10809 if (!buffer) {
10810 buffer = data.data;
10811 } else {
10812 swapBuffer = new Uint8Array(buffer.byteLength + data.data.byteLength);
10813 swapBuffer.set(buffer);
10814 swapBuffer.set(data.data, buffer.byteLength);
10815 buffer = swapBuffer;
10816 }
10817
10818 // Rec. ITU-T H.264, Annex B
10819 // scan for NAL unit boundaries
10820
10821 // a match looks like this:
10822 // 0 0 1 .. NAL .. 0 0 1
10823 // ^ sync point ^ i
10824 // or this:
10825 // 0 0 1 .. NAL .. 0 0 0
10826 // ^ sync point ^ i
10827
10828 // advance the sync point to a NAL start, if necessary
10829 for (; syncPoint < buffer.byteLength - 3; syncPoint++) {
10830 if (buffer[syncPoint + 2] === 1) {
10831 // the sync point is properly aligned
10832 i = syncPoint + 5;
10833 break;
10834 }
10835 }
10836
10837 while (i < buffer.byteLength) {
10838 // look at the current byte to determine if we've hit the end of
10839 // a NAL unit boundary
10840 switch (buffer[i]) {
10841 case 0:
10842 // skip past non-sync sequences
10843 if (buffer[i - 1] !== 0) {
10844 i += 2;
10845 break;
10846 } else if (buffer[i - 2] !== 0) {
10847 i++;
10848 break;
10849 }
10850
10851 // deliver the NAL unit if it isn't empty
10852 if (syncPoint + 3 !== i - 2) {
10853 this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
10854 }
10855
10856 // drop trailing zeroes
10857 do {
10858 i++;
10859 } while (buffer[i] !== 1 && i < buffer.length);
10860 syncPoint = i - 2;
10861 i += 3;
10862 break;
10863 case 1:
10864 // skip past non-sync sequences
10865 if (buffer[i - 1] !== 0 ||
10866 buffer[i - 2] !== 0) {
10867 i += 3;
10868 break;
10869 }
10870
10871 // deliver the NAL unit
10872 this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
10873 syncPoint = i - 2;
10874 i += 3;
10875 break;
10876 default:
10877 // the current byte isn't a one or zero, so it cannot be part
10878 // of a sync sequence
10879 i += 3;
10880 break;
10881 }
10882 }
10883 // filter out the NAL units that were delivered
10884 buffer = buffer.subarray(syncPoint);
10885 i -= syncPoint;
10886 syncPoint = 0;
10887 };
10888
10889 this.flush = function() {
10890 // deliver the last buffered NAL unit
10891 if (buffer && buffer.byteLength > 3) {
10892 this.trigger('data', buffer.subarray(syncPoint + 3));
10893 }
10894 // reset the stream state
10895 buffer = null;
10896 syncPoint = 0;
10897 this.trigger('done');
10898 };
10899};
10900NalByteStream.prototype = new Stream();
10901
10902// values of profile_idc that indicate additional fields are included in the SPS
10903// see Recommendation ITU-T H.264 (4/2013),
10904// 7.3.2.1.1 Sequence parameter set data syntax
10905PROFILES_WITH_OPTIONAL_SPS_DATA = {
10906 100: true,
10907 110: true,
10908 122: true,
10909 244: true,
10910 44: true,
10911 83: true,
10912 86: true,
10913 118: true,
10914 128: true,
10915 138: true,
10916 139: true,
10917 134: true
10918};
10919
10920/**
10921 * Accepts input from an ElementaryStream and produces H.264 NAL unit data
10922 * events.
10923 */
10924H264Stream = function() {
10925 var
10926 nalByteStream = new NalByteStream(),
10927 self,
10928 trackId,
10929 currentPts,
10930 currentDts,
10931
10932 discardEmulationPreventionBytes,
10933 readSequenceParameterSet,
10934 skipScalingList;
10935
10936 H264Stream.prototype.init.call(this);
10937 self = this;
10938
10939 this.push = function(packet) {
10940 if (packet.type !== 'video') {
10941 return;
10942 }
10943 trackId = packet.trackId;
10944 currentPts = packet.pts;
10945 currentDts = packet.dts;
10946
10947 nalByteStream.push(packet);
10948 };
10949
10950 nalByteStream.on('data', function(data) {
10951 var
10952 event = {
10953 trackId: trackId,
10954 pts: currentPts,
10955 dts: currentDts,
10956 data: data
10957 };
10958
10959 switch (data[0] & 0x1f) {
10960 case 0x05:
10961 event.nalUnitType = 'slice_layer_without_partitioning_rbsp_idr';
10962 break;
10963 case 0x06:
10964 event.nalUnitType = 'sei_rbsp';
10965 event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
10966 break;
10967 case 0x07:
10968 event.nalUnitType = 'seq_parameter_set_rbsp';
10969 event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
10970 event.config = readSequenceParameterSet(event.escapedRBSP);
10971 break;
10972 case 0x08:
10973 event.nalUnitType = 'pic_parameter_set_rbsp';
10974 break;
10975 case 0x09:
10976 event.nalUnitType = 'access_unit_delimiter_rbsp';
10977 break;
10978
10979 default:
10980 break;
10981 }
10982 self.trigger('data', event);
10983 });
10984 nalByteStream.on('done', function() {
10985 self.trigger('done');
10986 });
10987
10988 this.flush = function() {
10989 nalByteStream.flush();
10990 };
10991
10992 /**
10993 * Advance the ExpGolomb decoder past a scaling list. The scaling
10994 * list is optionally transmitted as part of a sequence parameter
10995 * set and is not relevant to transmuxing.
10996 * @param count {number} the number of entries in this scaling list
10997 * @param expGolombDecoder {object} an ExpGolomb pointing to the
10998 * start of a scaling list
10999 * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
11000 */
11001 skipScalingList = function(count, expGolombDecoder) {
11002 var
11003 lastScale = 8,
11004 nextScale = 8,
11005 j,
11006 deltaScale;
11007
11008 for (j = 0; j < count; j++) {
11009 if (nextScale !== 0) {
11010 deltaScale = expGolombDecoder.readExpGolomb();
11011 nextScale = (lastScale + deltaScale + 256) % 256;
11012 }
11013
11014 lastScale = (nextScale === 0) ? lastScale : nextScale;
11015 }
11016 };
11017
11018 /**
11019 * Expunge any "Emulation Prevention" bytes from a "Raw Byte
11020 * Sequence Payload"
11021 * @param data {Uint8Array} the bytes of a RBSP from a NAL
11022 * unit
11023 * @return {Uint8Array} the RBSP without any Emulation
11024 * Prevention Bytes
11025 */
11026 discardEmulationPreventionBytes = function(data) {
11027 var
11028 length = data.byteLength,
11029 emulationPreventionBytesPositions = [],
11030 i = 1,
11031 newLength, newData;
11032
11033 // Find all `Emulation Prevention Bytes`
11034 while (i < length - 2) {
11035 if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
11036 emulationPreventionBytesPositions.push(i + 2);
11037 i += 2;
11038 } else {
11039 i++;
11040 }
11041 }
11042
11043 // If no Emulation Prevention Bytes were found just return the original
11044 // array
11045 if (emulationPreventionBytesPositions.length === 0) {
11046 return data;
11047 }
11048
11049 // Create a new array to hold the NAL unit data
11050 newLength = length - emulationPreventionBytesPositions.length;
11051 newData = new Uint8Array(newLength);
11052 var sourceIndex = 0;
11053
11054 for (i = 0; i < newLength; sourceIndex++, i++) {
11055 if (sourceIndex === emulationPreventionBytesPositions[0]) {
11056 // Skip this byte
11057 sourceIndex++;
11058 // Remove this position index
11059 emulationPreventionBytesPositions.shift();
11060 }
11061 newData[i] = data[sourceIndex];
11062 }
11063
11064 return newData;
11065 };
11066
11067 /**
11068 * Read a sequence parameter set and return some interesting video
11069 * properties. A sequence parameter set is the H264 metadata that
11070 * describes the properties of upcoming video frames.
11071 * @param data {Uint8Array} the bytes of a sequence parameter set
11072 * @return {object} an object with configuration parsed from the
11073 * sequence parameter set, including the dimensions of the
11074 * associated video frames.
11075 */
11076 readSequenceParameterSet = function(data) {
11077 var
11078 frameCropLeftOffset = 0,
11079 frameCropRightOffset = 0,
11080 frameCropTopOffset = 0,
11081 frameCropBottomOffset = 0,
11082 sarScale = 1,
11083 expGolombDecoder, profileIdc, levelIdc, profileCompatibility,
11084 chromaFormatIdc, picOrderCntType,
11085 numRefFramesInPicOrderCntCycle, picWidthInMbsMinus1,
11086 picHeightInMapUnitsMinus1,
11087 frameMbsOnlyFlag,
11088 scalingListCount,
11089 sarRatio,
11090 aspectRatioIdc,
11091 i;
11092
11093 expGolombDecoder = new ExpGolomb(data);
11094 profileIdc = expGolombDecoder.readUnsignedByte(); // profile_idc
11095 profileCompatibility = expGolombDecoder.readUnsignedByte(); // constraint_set[0-5]_flag
11096 levelIdc = expGolombDecoder.readUnsignedByte(); // level_idc u(8)
11097 expGolombDecoder.skipUnsignedExpGolomb(); // seq_parameter_set_id
11098
11099 // some profiles have more optional data we don't need
11100 if (PROFILES_WITH_OPTIONAL_SPS_DATA[profileIdc]) {
11101 chromaFormatIdc = expGolombDecoder.readUnsignedExpGolomb();
11102 if (chromaFormatIdc === 3) {
11103 expGolombDecoder.skipBits(1); // separate_colour_plane_flag
11104 }
11105 expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_luma_minus8
11106 expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_chroma_minus8
11107 expGolombDecoder.skipBits(1); // qpprime_y_zero_transform_bypass_flag
11108 if (expGolombDecoder.readBoolean()) { // seq_scaling_matrix_present_flag
11109 scalingListCount = (chromaFormatIdc !== 3) ? 8 : 12;
11110 for (i = 0; i < scalingListCount; i++) {
11111 if (expGolombDecoder.readBoolean()) { // seq_scaling_list_present_flag[ i ]
11112 if (i < 6) {
11113 skipScalingList(16, expGolombDecoder);
11114 } else {
11115 skipScalingList(64, expGolombDecoder);
11116 }
11117 }
11118 }
11119 }
11120 }
11121
11122 expGolombDecoder.skipUnsignedExpGolomb(); // log2_max_frame_num_minus4
11123 picOrderCntType = expGolombDecoder.readUnsignedExpGolomb();
11124
11125 if (picOrderCntType === 0) {
11126 expGolombDecoder.readUnsignedExpGolomb(); // log2_max_pic_order_cnt_lsb_minus4
11127 } else if (picOrderCntType === 1) {
11128 expGolombDecoder.skipBits(1); // delta_pic_order_always_zero_flag
11129 expGolombDecoder.skipExpGolomb(); // offset_for_non_ref_pic
11130 expGolombDecoder.skipExpGolomb(); // offset_for_top_to_bottom_field
11131 numRefFramesInPicOrderCntCycle = expGolombDecoder.readUnsignedExpGolomb();
11132 for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
11133 expGolombDecoder.skipExpGolomb(); // offset_for_ref_frame[ i ]
11134 }
11135 }
11136
11137 expGolombDecoder.skipUnsignedExpGolomb(); // max_num_ref_frames
11138 expGolombDecoder.skipBits(1); // gaps_in_frame_num_value_allowed_flag
11139
11140 picWidthInMbsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
11141 picHeightInMapUnitsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
11142
11143 frameMbsOnlyFlag = expGolombDecoder.readBits(1);
11144 if (frameMbsOnlyFlag === 0) {
11145 expGolombDecoder.skipBits(1); // mb_adaptive_frame_field_flag
11146 }
11147
11148 expGolombDecoder.skipBits(1); // direct_8x8_inference_flag
11149 if (expGolombDecoder.readBoolean()) { // frame_cropping_flag
11150 frameCropLeftOffset = expGolombDecoder.readUnsignedExpGolomb();
11151 frameCropRightOffset = expGolombDecoder.readUnsignedExpGolomb();
11152 frameCropTopOffset = expGolombDecoder.readUnsignedExpGolomb();
11153 frameCropBottomOffset = expGolombDecoder.readUnsignedExpGolomb();
11154 }
11155 if (expGolombDecoder.readBoolean()) {
11156 // vui_parameters_present_flag
11157 if (expGolombDecoder.readBoolean()) {
11158 // aspect_ratio_info_present_flag
11159 aspectRatioIdc = expGolombDecoder.readUnsignedByte();
11160 switch (aspectRatioIdc) {
11161 case 1: sarRatio = [1, 1]; break;
11162 case 2: sarRatio = [12, 11]; break;
11163 case 3: sarRatio = [10, 11]; break;
11164 case 4: sarRatio = [16, 11]; break;
11165 case 5: sarRatio = [40, 33]; break;
11166 case 6: sarRatio = [24, 11]; break;
11167 case 7: sarRatio = [20, 11]; break;
11168 case 8: sarRatio = [32, 11]; break;
11169 case 9: sarRatio = [80, 33]; break;
11170 case 10: sarRatio = [18, 11]; break;
11171 case 11: sarRatio = [15, 11]; break;
11172 case 12: sarRatio = [64, 33]; break;
11173 case 13: sarRatio = [160, 99]; break;
11174 case 14: sarRatio = [4, 3]; break;
11175 case 15: sarRatio = [3, 2]; break;
11176 case 16: sarRatio = [2, 1]; break;
11177 case 255: {
11178 sarRatio = [expGolombDecoder.readUnsignedByte() << 8 |
11179 expGolombDecoder.readUnsignedByte(),
11180 expGolombDecoder.readUnsignedByte() << 8 |
11181 expGolombDecoder.readUnsignedByte() ];
11182 break;
11183 }
11184 }
11185 if (sarRatio) {
11186 sarScale = sarRatio[0] / sarRatio[1];
11187 }
11188 }
11189 }
11190 return {
11191 profileIdc: profileIdc,
11192 levelIdc: levelIdc,
11193 profileCompatibility: profileCompatibility,
11194 width: Math.ceil((((picWidthInMbsMinus1 + 1) * 16) - frameCropLeftOffset * 2 - frameCropRightOffset * 2) * sarScale),
11195 height: ((2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16) - (frameCropTopOffset * 2) - (frameCropBottomOffset * 2)
11196 };
11197 };
11198
11199};
11200H264Stream.prototype = new Stream();
11201
11202module.exports = {
11203 H264Stream: H264Stream,
11204 NalByteStream: NalByteStream
11205};
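/**
 * A minimal usage sketch (illustrative only; `annexBBytes` stands in for a
 * Uint8Array of Annex B data from a video elementary stream):
 *
 * ```js
 * var h264Stream = new H264Stream();
 *
 * h264Stream.on('data', function(nal) {
 *   if (nal.nalUnitType === 'seq_parameter_set_rbsp') {
 *     console.log(nal.config.width + 'x' + nal.config.height);
 *   }
 * });
 * h264Stream.push({ type: 'video', trackId: 1, pts: 0, dts: 0, data: annexBBytes });
 * h264Stream.flush();
 * ```
 */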
11206
11207},{"../utils/exp-golomb.js":61,"../utils/stream.js":62}],42:[function(require,module,exports){
11208var highPrefix = [33, 16, 5, 32, 164, 27];
11209var lowPrefix = [33, 65, 108, 84, 1, 2, 4, 8, 168, 2, 4, 8, 17, 191, 252];
11210var zeroFill = function(count) {
11211 var a = [];
11212 while (count--) {
11213 a.push(0);
11214 }
11215 return a;
11216};
11217
11218var makeTable = function(metaTable) {
11219 return Object.keys(metaTable).reduce(function(obj, key) {
11220 obj[key] = new Uint8Array(metaTable[key].reduce(function(arr, part) {
11221 return arr.concat(part);
11222 }, []));
11223 return obj;
11224 }, {});
11225};
11226
11227// Frames-of-silence to use for filling in missing AAC frames
11228var coneOfSilence = {
11229 96000: [highPrefix, [227, 64], zeroFill(154), [56]],
11230 88200: [highPrefix, [231], zeroFill(170), [56]],
11231 64000: [highPrefix, [248, 192], zeroFill(240), [56]],
11232 48000: [highPrefix, [255, 192], zeroFill(268), [55, 148, 128], zeroFill(54), [112]],
11233 44100: [highPrefix, [255, 192], zeroFill(268), [55, 163, 128], zeroFill(84), [112]],
11234 32000: [highPrefix, [255, 192], zeroFill(268), [55, 234], zeroFill(226), [112]],
11235 24000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 112], zeroFill(126), [224]],
11236 16000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 255], zeroFill(269), [223, 108], zeroFill(195), [1, 192]],
11237 12000: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 253, 128], zeroFill(259), [56]],
11238 11025: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 255, 192], zeroFill(268), [55, 175, 128], zeroFill(108), [112]],
11239 8000: [lowPrefix, zeroFill(268), [3, 121, 16], zeroFill(47), [7]]
11240};
11241
11242module.exports = makeTable(coneOfSilence);
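// e.g. the exported table maps a sample rate to a pre-encoded AAC frame of
// silence: module.exports[44100] can be spliced in when 44.1 kHz audio
// frames are missing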
11243
11244},{}],43:[function(require,module,exports){
11245'use strict';
11246
11247var Stream = require('../utils/stream.js');
11248
11249/**
11250 * The final stage of the transmuxer that emits the flv tags
11251 * for audio, video, and metadata. It also translates caption and ID3
11252 * timing into the segment timeline and outputs caption data and id3 cues.
11253 */
11254var CoalesceStream = function(options) {
11255 // Number of Tracks per output segment
11256 // If greater than 1, we combine multiple
11257 // tracks into a single segment
11258 this.numberOfTracks = 0;
11259 this.metadataStream = options.metadataStream;
11260
11261 this.videoTags = [];
11262 this.audioTags = [];
11263 this.videoTrack = null;
11264 this.audioTrack = null;
11265 this.pendingCaptions = [];
11266 this.pendingMetadata = [];
11267 this.pendingTracks = 0;
11268 this.processedTracks = 0;
11269
11270 CoalesceStream.prototype.init.call(this);
11271
11272 // Take output from multiple
11273 this.push = function(output) {
11274 // buffer incoming captions until the associated video segment
11275 // finishes
11276 if (output.text) {
11277 return this.pendingCaptions.push(output);
11278 }
11279 // buffer incoming id3 tags until the final flush
11280 if (output.frames) {
11281 return this.pendingMetadata.push(output);
11282 }
11283
11284 if (output.track.type === 'video') {
11285 this.videoTrack = output.track;
11286 this.videoTags = output.tags;
11287 this.pendingTracks++;
11288 }
11289 if (output.track.type === 'audio') {
11290 this.audioTrack = output.track;
11291 this.audioTags = output.tags;
11292 this.pendingTracks++;
11293 }
11294 };
11295};
11296
11297CoalesceStream.prototype = new Stream();
11298CoalesceStream.prototype.flush = function(flushSource) {
11299 var
11300 id3,
11301 caption,
11302 i,
11303 timelineStartPts,
11304 event = {
11305 tags: {},
11306 captions: [],
11307 captionStreams: {},
11308 metadata: []
11309 };
11310
11311 if (this.pendingTracks < this.numberOfTracks) {
11312 if (flushSource !== 'VideoSegmentStream' &&
11313 flushSource !== 'AudioSegmentStream') {
11314 // Return because we haven't received a flush from a data-generating
11315 // portion of the segment (meaning that we have only received metadata
11316 // or captions.)
11317 return;
11318 } else if (this.pendingTracks === 0) {
11319 // In the case where we receive a flush without any data having been
11320 // received we consider it an emitted track for the purposes of coalescing
11321 // `done` events.
11322 // We do this for the case where there is an audio and video track in the
11323 // segment but no audio data. (seen in several playlists with alternate
11324 // audio tracks and no audio present in the main TS segments.)
11325 this.processedTracks++;
11326
11327 if (this.processedTracks < this.numberOfTracks) {
11328 return;
11329 }
11330 }
11331 }
11332
11333 this.processedTracks += this.pendingTracks;
11334 this.pendingTracks = 0;
11335
11336 if (this.processedTracks < this.numberOfTracks) {
11337 return;
11338 }
11339
11340 if (this.videoTrack) {
11341 timelineStartPts = this.videoTrack.timelineStartInfo.pts;
11342 } else if (this.audioTrack) {
11343 timelineStartPts = this.audioTrack.timelineStartInfo.pts;
11344 }
11345
11346 event.tags.videoTags = this.videoTags;
11347 event.tags.audioTags = this.audioTags;
11348
11349 // Translate caption PTS times into second offsets into the
11350 // video timeline for the segment, and add track info
11351 for (i = 0; i < this.pendingCaptions.length; i++) {
11352 caption = this.pendingCaptions[i];
11353 caption.startTime = caption.startPts - timelineStartPts;
11354 caption.startTime /= 90e3;
11355 caption.endTime = caption.endPts - timelineStartPts;
11356 caption.endTime /= 90e3;
11357 event.captionStreams[caption.stream] = true;
11358 event.captions.push(caption);
11359 }
11360
11361 // Translate ID3 frame PTS times into second offsets into the
11362 // video timeline for the segment
11363 for (i = 0; i < this.pendingMetadata.length; i++) {
11364 id3 = this.pendingMetadata[i];
11365 id3.cueTime = id3.pts - timelineStartPts;
11366 id3.cueTime /= 90e3;
11367 event.metadata.push(id3);
11368 }
11369 // We add this to every single emitted segment even though we only need
11370 // it for the first
11371 event.metadata.dispatchType = this.metadataStream.dispatchType;
11372
11373 // Reset stream state
11374 this.videoTrack = null;
11375 this.audioTrack = null;
11376 this.videoTags = [];
11377 this.audioTags = [];
11378 this.pendingCaptions.length = 0;
11379 this.pendingMetadata.length = 0;
11380 this.pendingTracks = 0;
11381 this.processedTracks = 0;
11382
11383 // Emit the final segment
11384 this.trigger('data', event);
11385
11386 this.trigger('done');
11387};
11388
11389module.exports = CoalesceStream;
11390
11391},{"../utils/stream.js":62}],44:[function(require,module,exports){
11392'use strict';
11393
11394var FlvTag = require('./flv-tag.js');
11395
11396// For information on the FLV format, see
11397// http://download.macromedia.com/f4v/video_file_format_spec_v10_1.pdf.
11398// Technically, this function returns the header and a metadata FLV tag
11399// if duration is greater than zero
11400// duration in seconds
11401 // @return {Uint8Array} the bytes of the FLV header
11402var getFlvHeader = function(duration, audio, video) { // :ByteArray {
11403 var
11404 headBytes = new Uint8Array(3 + 1 + 1 + 4),
11405 head = new DataView(headBytes.buffer),
11406 metadata,
11407 result,
11408 metadataLength;
11409
11410 // default arguments
11411 duration = duration || 0;
11412 audio = audio === undefined ? true : audio;
11413 video = video === undefined ? true : video;
11414
11415 // signature
11416 head.setUint8(0, 0x46); // 'F'
11417 head.setUint8(1, 0x4c); // 'L'
11418 head.setUint8(2, 0x56); // 'V'
11419
11420 // version
11421 head.setUint8(3, 0x01);
11422
11423 // flags
11424 head.setUint8(4, (audio ? 0x04 : 0x00) | (video ? 0x01 : 0x00));
11425
11426 // data offset, should be 9 for FLV v1
11427 head.setUint32(5, headBytes.byteLength);
11428
11429 // init the first FLV tag
11430 if (duration <= 0) {
11431 // no duration available so just write the first field of the first
11432 // FLV tag
11433 result = new Uint8Array(headBytes.byteLength + 4);
11434 result.set(headBytes);
11435 result.set([0, 0, 0, 0], headBytes.byteLength);
11436 return result;
11437 }
11438
11439 // write out the duration metadata tag
11440 metadata = new FlvTag(FlvTag.METADATA_TAG);
11441 metadata.pts = metadata.dts = 0;
11442 metadata.writeMetaDataDouble('duration', duration);
11443 metadataLength = metadata.finalize().length;
11444 result = new Uint8Array(headBytes.byteLength + metadataLength);
11445 result.set(headBytes);
11446 result.set(metadata.bytes.subarray(0, metadataLength), headBytes.byteLength); // append the finalized metadata tag after the header
11447
11448 return result;
11449};
11450
11451module.exports = getFlvHeader;
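// illustrative call: build a header for a 30 second stream with both the
// audio and video flags set, ready to be written ahead of the FLV tags:
//
//   var header = getFlvHeader(30, true, true);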
11452
11453},{"./flv-tag.js":45}],45:[function(require,module,exports){
11454/**
11455 * An object that stores the bytes of an FLV tag and methods for
11456 * querying and manipulating that data.
11457 * @see http://download.macromedia.com/f4v/video_file_format_spec_v10_1.pdf
11458 */
11459'use strict';
11460
11461var FlvTag;
11462
11463// (type:uint, extraData:Boolean = false) extends ByteArray
11464FlvTag = function(type, extraData) {
11465 var
11466 // Counter if this is a metadata tag, nal start marker if this is a video
11467 // tag. unused if this is an audio tag
11468 adHoc = 0, // :uint
11469
11470 // The default size is 16kb but this is not enough to hold I-frame
11471 // data; the resizing algorithm costs a bit, so we create a larger
11472 // starting buffer for video tags
11473 bufferStartSize = 16384,
11474
11475 // checks whether the FLV tag has enough capacity to accept the proposed
11476 // write and re-allocates the internal buffers if necessary
11477 prepareWrite = function(flv, count) {
11478 var
11479 bytes,
11480 minLength = flv.position + count;
11481 if (minLength < flv.bytes.byteLength) {
11482 // there's enough capacity so do nothing
11483 return;
11484 }
11485
11486 // allocate a new buffer and copy over the data that will not be modified
11487 bytes = new Uint8Array(minLength * 2);
11488 bytes.set(flv.bytes.subarray(0, flv.position), 0);
11489 flv.bytes = bytes;
11490 flv.view = new DataView(flv.bytes.buffer);
11491 },
11492
11493 // commonly used metadata properties
11494 widthBytes = FlvTag.widthBytes || new Uint8Array('width'.length),
11495 heightBytes = FlvTag.heightBytes || new Uint8Array('height'.length),
11496 videocodecidBytes = FlvTag.videocodecidBytes || new Uint8Array('videocodecid'.length),
11497 i;
11498
11499 if (!FlvTag.widthBytes) {
11500 // calculating the bytes of common metadata names ahead of time makes the
11501 // corresponding writes faster because we don't have to loop over the
11502 // characters
11503 // re-test with test/perf.html if you're planning on changing this
11504 for (i = 0; i < 'width'.length; i++) {
11505 widthBytes[i] = 'width'.charCodeAt(i);
11506 }
11507 for (i = 0; i < 'height'.length; i++) {
11508 heightBytes[i] = 'height'.charCodeAt(i);
11509 }
11510 for (i = 0; i < 'videocodecid'.length; i++) {
11511 videocodecidBytes[i] = 'videocodecid'.charCodeAt(i);
11512 }
11513
11514 FlvTag.widthBytes = widthBytes;
11515 FlvTag.heightBytes = heightBytes;
11516 FlvTag.videocodecidBytes = videocodecidBytes;
11517 }
11518
11519 this.keyFrame = false; // :Boolean
11520
11521 switch (type) {
11522 case FlvTag.VIDEO_TAG:
11523 this.length = 16;
11524 // Start the buffer at 96k (16k * 6)
11525 bufferStartSize *= 6;
11526 break;
11527 case FlvTag.AUDIO_TAG:
11528 this.length = 13;
11529 this.keyFrame = true;
11530 break;
11531 case FlvTag.METADATA_TAG:
11532 this.length = 29;
11533 this.keyFrame = true;
11534 break;
11535 default:
11536 throw new Error('Unknown FLV tag type');
11537 }
11538
11539 this.bytes = new Uint8Array(bufferStartSize);
11540 this.view = new DataView(this.bytes.buffer);
11541 this.bytes[0] = type;
11542 this.position = this.length;
11543 this.keyFrame = extraData; // Defaults to false; note this overwrites the value set in the switch above
11544
11545 // presentation timestamp
11546 this.pts = 0;
11547 // decoder timestamp
11548 this.dts = 0;
11549
11550 // ByteArray#writeBytes(bytes:ByteArray, offset:uint = 0, length:uint = 0)
11551 this.writeBytes = function(bytes, offset, length) {
11552 var
11553 start = offset || 0,
11554 end;
11555 length = length || bytes.byteLength;
11556 end = start + length;
11557
11558 prepareWrite(this, length);
11559 this.bytes.set(bytes.subarray(start, end), this.position);
11560
11561 this.position += length;
11562 this.length = Math.max(this.length, this.position);
11563 };
11564
11565 // ByteArray#writeByte(value:int):void
11566 this.writeByte = function(byte) {
11567 prepareWrite(this, 1);
11568 this.bytes[this.position] = byte;
11569 this.position++;
11570 this.length = Math.max(this.length, this.position);
11571 };
11572
11573 // ByteArray#writeShort(value:int):void
11574 this.writeShort = function(short) {
11575 prepareWrite(this, 2);
11576 this.view.setUint16(this.position, short);
11577 this.position += 2;
11578 this.length = Math.max(this.length, this.position);
11579 };
11580
11581 // Negative index into array
11582 // (pos:uint):int
11583 this.negIndex = function(pos) {
11584 return this.bytes[this.length - pos];
11585 };
11586
11587 // The functions below ONLY work when this[0] == VIDEO_TAG.
11588 // We are not going to check for that because we don't want the overhead
11589 // (nal:ByteArray = null):int
11590 this.nalUnitSize = function() {
11591 if (adHoc === 0) {
11592 return 0;
11593 }
11594
11595 return this.length - (adHoc + 4);
11596 };
11597
11598 this.startNalUnit = function() {
11599 // remember position and add 4 bytes
11600 if (adHoc > 0) {
11601 throw new Error('Attempted to create new NAL without closing the old one');
11602 }
11603
11604 // reserve 4 bytes for nal unit size
11605 adHoc = this.length;
11606 this.length += 4;
11607 this.position = this.length;
11608 };
11609
11610 // (nal:ByteArray = null):void
11611 this.endNalUnit = function(nalContainer) {
11612 var
11613 nalStart, // :uint
11614 nalLength; // :uint
11615
11616 // Rewind to the marker and write the size
11617 if (this.length === adHoc + 4) {
11618 // we started a nal unit, but didn't write one, so roll back the 4 byte size value
11619 this.length -= 4;
11620 } else if (adHoc > 0) {
11621 nalStart = adHoc + 4;
11622 nalLength = this.length - nalStart;
11623
11624 this.position = adHoc;
11625 this.view.setUint32(this.position, nalLength);
11626 this.position = this.length;
11627
11628 if (nalContainer) {
11629 // Add the tag to the NAL unit
11630 nalContainer.push(this.bytes.subarray(nalStart, nalStart + nalLength));
11631 }
11632 }
11633
11634 adHoc = 0;
11635 };
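
  // The two methods above produce length-prefixed (AVCC-style) NAL units:
  // a big-endian uint32 size followed by the NAL bytes. A hypothetical
  // write sequence (nalBytes is an assumed Uint8Array):
  //
  //   tag.startNalUnit(); // reserves 4 bytes for the size field
  //   tag.writeBytes(nalBytes); // appends the NAL unit payload
  //   tag.endNalUnit(); // rewinds and fills in the reserved size field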
11636
11637 /**
11638 * Write out a 64-bit floating point valued metadata property. This method is
11639 * called frequently during a typical parse and needs to be fast.
11640 */
11641 // (key:String, val:Number):void
11642 this.writeMetaDataDouble = function(key, val) {
11643 var i;
11644 prepareWrite(this, 2 + key.length + 9);
11645
11646 // write size of property name
11647 this.view.setUint16(this.position, key.length);
11648 this.position += 2;
11649
11650 // this next part looks terrible but it improves parser throughput by
11651 // 10kB/s in my testing
11652
11653 // write property name
11654 if (key === 'width') {
11655 this.bytes.set(widthBytes, this.position);
11656 this.position += 5;
11657 } else if (key === 'height') {
11658 this.bytes.set(heightBytes, this.position);
11659 this.position += 6;
11660 } else if (key === 'videocodecid') {
11661 this.bytes.set(videocodecidBytes, this.position);
11662 this.position += 12;
11663 } else {
11664 for (i = 0; i < key.length; i++) {
11665 this.bytes[this.position] = key.charCodeAt(i);
11666 this.position++;
11667 }
11668 }
11669
11670 // skip null byte
11671 this.position++;
11672
11673 // write property value
11674 this.view.setFloat64(this.position, val);
11675 this.position += 8;
11676
11677 // update flv tag length
11678 this.length = Math.max(this.length, this.position);
11679 ++adHoc;
11680 };
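
  // The bytes written above follow the AMF0 SCRIPTDATA property layout: a
  // uint16 name length, the ASCII name, a type marker (0x00 for Number,
  // produced here by skipping a byte of the zero-filled buffer), then a
  // big-endian float64. For example, writeMetaDataDouble('duration', 10)
  // appends (hex): 00 08 'duration' 00 40 24 00 00 00 00 00 00.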
11681
11682 // (key:String, val:Boolean):void
11683 this.writeMetaDataBoolean = function(key, val) {
11684 var i;
11685 prepareWrite(this, 2);
11686 this.view.setUint16(this.position, key.length);
11687 this.position += 2;
11688 for (i = 0; i < key.length; i++) {
11689 // if key.charCodeAt(i) >= 255, handle error
11690 prepareWrite(this, 1);
11691 this.bytes[this.position] = key.charCodeAt(i);
11692 this.position++;
11693 }
11694 prepareWrite(this, 2);
11695 this.view.setUint8(this.position, 0x01);
11696 this.position++;
11697 this.view.setUint8(this.position, val ? 0x01 : 0x00);
11698 this.position++;
11699 this.length = Math.max(this.length, this.position);
11700 ++adHoc;
11701 };
11702
11703 // ():ByteArray
11704 this.finalize = function() {
11705 var
11706 dtsDelta, // :int
11707 len; // :int
11708
11709 switch (this.bytes[0]) {
11710 // Video Data
11711 case FlvTag.VIDEO_TAG:
11712 // We only support AVC, 1 = key frame (for AVC, a seekable
11713 // frame), 2 = inter frame (for AVC, a non-seekable frame)
11714 this.bytes[11] = ((this.keyFrame || extraData) ? 0x10 : 0x20) | 0x07;
11715 this.bytes[12] = extraData ? 0x00 : 0x01;
11716
11717 dtsDelta = this.pts - this.dts;
11718 this.bytes[13] = (dtsDelta & 0x00FF0000) >>> 16;
11719 this.bytes[14] = (dtsDelta & 0x0000FF00) >>> 8;
11720 this.bytes[15] = (dtsDelta & 0x000000FF) >>> 0;
11721 break;
11722
11723 case FlvTag.AUDIO_TAG:
11724 this.bytes[11] = 0xAF; // AAC, 44.1 kHz, 16-bit stereo
11725 this.bytes[12] = extraData ? 0x00 : 0x01;
11726 break;
11727
11728 case FlvTag.METADATA_TAG:
11729 this.position = 11;
11730 this.view.setUint8(this.position, 0x02); // String type
11731 this.position++;
11732 this.view.setUint16(this.position, 0x0A); // 10 Bytes
11733 this.position += 2;
11734 // set "onMetaData"
11735 this.bytes.set([0x6f, 0x6e, 0x4d, 0x65,
11736 0x74, 0x61, 0x44, 0x61,
11737 0x74, 0x61], this.position);
11738 this.position += 10;
11739 this.bytes[this.position] = 0x08; // Array type
11740 this.position++;
11741 this.view.setUint32(this.position, adHoc);
11742 this.position = this.length;
11743 this.bytes.set([0, 0, 9], this.position);
11744 this.position += 3; // End Data Tag
11745 this.length = this.position;
11746 break;
11747 }
11748
11749 len = this.length - 11;
11750
11751 // write the DataSize field
11752 this.bytes[ 1] = (len & 0x00FF0000) >>> 16;
11753 this.bytes[ 2] = (len & 0x0000FF00) >>> 8;
11754 this.bytes[ 3] = (len & 0x000000FF) >>> 0;
11755 // write the Timestamp
11756 this.bytes[ 4] = (this.dts & 0x00FF0000) >>> 16;
11757 this.bytes[ 5] = (this.dts & 0x0000FF00) >>> 8;
11758 this.bytes[ 6] = (this.dts & 0x000000FF) >>> 0;
11759 this.bytes[ 7] = (this.dts & 0xFF000000) >>> 24;
11760 // write the StreamID
11761 this.bytes[ 8] = 0;
11762 this.bytes[ 9] = 0;
11763 this.bytes[10] = 0;
11764
11765 // We may be at the very end of the buffer with four more bytes still to
11766 // write for the PreviousTagSize field, so grow the buffer if necessary
11767 prepareWrite(this, 4);
11768 this.view.setUint32(this.length, this.length);
11769 this.length += 4;
11770 this.position += 4;
11771
11772 // trim down the byte buffer to what is actually being used
11773 this.bytes = this.bytes.subarray(0, this.length);
11774 this.frameTime = FlvTag.frameTime(this.bytes);
11775 // if bytes.bytelength isn't equal to this.length, handle error
11776 return this;
11777 };
11778};
11779
11780FlvTag.AUDIO_TAG = 0x08; // == 8, :uint
11781FlvTag.VIDEO_TAG = 0x09; // == 9, :uint
11782FlvTag.METADATA_TAG = 0x12; // == 18, :uint
11783
11784// (tag:ByteArray):Boolean {
11785FlvTag.isAudioFrame = function(tag) {
11786 return FlvTag.AUDIO_TAG === tag[0];
11787};
11788
11789// (tag:ByteArray):Boolean {
11790FlvTag.isVideoFrame = function(tag) {
11791 return FlvTag.VIDEO_TAG === tag[0];
11792};
11793
11794// (tag:ByteArray):Boolean {
11795FlvTag.isMetaData = function(tag) {
11796 return FlvTag.METADATA_TAG === tag[0];
11797};
11798
11799// (tag:ByteArray):Boolean {
11800FlvTag.isKeyFrame = function(tag) {
11801 if (FlvTag.isVideoFrame(tag)) {
11802 return tag[11] === 0x17;
11803 }
11804
11805 if (FlvTag.isAudioFrame(tag)) {
11806 return true;
11807 }
11808
11809 if (FlvTag.isMetaData(tag)) {
11810 return true;
11811 }
11812
11813 return false;
11814};
11815
11816// (tag:ByteArray):uint {
11817FlvTag.frameTime = function(tag) {
11818 var pts = tag[ 4] << 16; // :uint
11819 pts |= tag[ 5] << 8;
11820 pts |= tag[ 6] << 0;
11821 pts |= tag[ 7] << 24;
11822 return pts;
11823};
11824
11825module.exports = FlvTag;
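
// A hypothetical usage sketch (not part of the library): build and finalize
// a metadata tag. finalize() fills in the 11-byte FLV tag header (type,
// DataSize, Timestamp, StreamID) and appends the 4-byte PreviousTagSize.
//
//   var FlvTag = require('./flv-tag.js');
//   var tag = new FlvTag(FlvTag.METADATA_TAG);
//   tag.pts = tag.dts = 0;
//   tag.writeMetaDataDouble('duration', 10);
//   var bytes = tag.finalize().bytes; // trimmed Uint8Array, ready to append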
11826
11827},{}],46:[function(require,module,exports){
11828module.exports = {
11829 tag: require('./flv-tag'),
11830 Transmuxer: require('./transmuxer'),
11831 getFlvHeader: require('./flv-header')
11832};
11833
11834},{"./flv-header":44,"./flv-tag":45,"./transmuxer":48}],47:[function(require,module,exports){
11835'use strict';
11836
11837var TagList = function() {
11838 var self = this;
11839
11840 this.list = [];
11841
11842 this.push = function(tag) {
11843 this.list.push({
11844 bytes: tag.bytes,
11845 dts: tag.dts,
11846 pts: tag.pts,
11847 keyFrame: tag.keyFrame,
11848 metaDataTag: tag.metaDataTag
11849 });
11850 };
11851
11852 Object.defineProperty(this, 'length', {
11853 get: function() {
11854 return self.list.length;
11855 }
11856 });
11857};
11858
11859module.exports = TagList;
11860
11861},{}],48:[function(require,module,exports){
11862'use strict';
11863
11864var Stream = require('../utils/stream.js');
11865var FlvTag = require('./flv-tag.js');
11866var m2ts = require('../m2ts/m2ts.js');
11867var AdtsStream = require('../codecs/adts.js');
11868var H264Stream = require('../codecs/h264').H264Stream;
11869var CoalesceStream = require('./coalesce-stream.js');
11870var TagList = require('./tag-list.js');
11871
11872var
11873 Transmuxer,
11874 VideoSegmentStream,
11875 AudioSegmentStream,
11876 collectTimelineInfo,
11877 metaDataTag,
11878 extraDataTag;
11879
11880/**
11881 * Store information about the start and end of the track and the
11882 * duration for each frame/sample we process in order to calculate
11883 * the baseMediaDecodeTime
11884 */
11885collectTimelineInfo = function(track, data) {
11886 if (typeof data.pts === 'number') {
11887 if (track.timelineStartInfo.pts === undefined) {
11888 track.timelineStartInfo.pts = data.pts;
11889 } else {
11890 track.timelineStartInfo.pts =
11891 Math.min(track.timelineStartInfo.pts, data.pts);
11892 }
11893 }
11894
11895 if (typeof data.dts === 'number') {
11896 if (track.timelineStartInfo.dts === undefined) {
11897 track.timelineStartInfo.dts = data.dts;
11898 } else {
11899 track.timelineStartInfo.dts =
11900 Math.min(track.timelineStartInfo.dts, data.dts);
11901 }
11902 }
11903};
11904
11905metaDataTag = function(track, pts) {
11906 var
11907 tag = new FlvTag(FlvTag.METADATA_TAG); // :FlvTag
11908
11909 tag.dts = pts;
11910 tag.pts = pts;
11911
11912 tag.writeMetaDataDouble('videocodecid', 7);
11913 tag.writeMetaDataDouble('width', track.width);
11914 tag.writeMetaDataDouble('height', track.height);
11915
11916 return tag;
11917};
11918
11919extraDataTag = function(track, pts) {
11920 var
11921 i,
11922 tag = new FlvTag(FlvTag.VIDEO_TAG, true);
11923
11924 tag.dts = pts;
11925 tag.pts = pts;
11926
11927 tag.writeByte(0x01);// version
11928 tag.writeByte(track.profileIdc);// profile
11929 tag.writeByte(track.profileCompatibility);// compatibility
11930 tag.writeByte(track.levelIdc);// level
11931 tag.writeByte(0xFC | 0x03); // reserved (6 bits), NALU length size - 1 (2 bits)
11932 tag.writeByte(0xE0 | 0x01); // reserved (3 bits), num of SPS (5 bits)
11933 tag.writeShort(track.sps[0].length); // length of SPS data
11934 tag.writeBytes(track.sps[0]); // SPS
11935
11936 tag.writeByte(track.pps.length); // num of PPS (will there ever be more than 1 PPS?)
11937 for (i = 0; i < track.pps.length; ++i) {
11938 tag.writeShort(track.pps[i].length); // 2 bytes for length of PPS
11939 tag.writeBytes(track.pps[i]); // data of PPS
11940 }
11941
11942 return tag;
11943};
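
// The tag body written above is an AVCDecoderConfigurationRecord (see
// ISO/IEC 14496-15): version, profile, compatibility and level bytes, the
// NAL length-size-minus-one, then counted lists of SPS and PPS entries,
// each prefixed with a uint16 length.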
11944
11945/**
11946 * Constructs a single-track media segment from AAC data
11947 * events. The output of this stream can be fed to Flash.
11948 */
11949AudioSegmentStream = function(track) {
11950 var
11951 adtsFrames = [],
11952 videoKeyFrames = [],
11953 oldExtraData;
11954
11955 AudioSegmentStream.prototype.init.call(this);
11956
11957 this.push = function(data) {
11958 collectTimelineInfo(track, data);
11959
11960 if (track) {
11961 track.audioobjecttype = data.audioobjecttype;
11962 track.channelcount = data.channelcount;
11963 track.samplerate = data.samplerate;
11964 track.samplingfrequencyindex = data.samplingfrequencyindex;
11965 track.samplesize = data.samplesize;
11966 track.extraData = (track.audioobjecttype << 11) |
11967 (track.samplingfrequencyindex << 7) |
11968 (track.channelcount << 3);
11969 }
11970
11971 data.pts = Math.round(data.pts / 90);
11972 data.dts = Math.round(data.dts / 90);
11973
11974 // buffer audio data until end() is called
11975 adtsFrames.push(data);
11976 };
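
  // track.extraData above packs a 2-byte AAC AudioSpecificConfig. A worked
  // example for AAC-LC (audioobjecttype 2) at 44.1 kHz
  // (samplingfrequencyindex 4) in stereo (channelcount 2):
  //
  //   (2 << 11) | (4 << 7) | (2 << 3) === 0x1210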
11977
11978 this.flush = function() {
11979 var currentFrame, adtsFrame, lastMetaPts, tags = new TagList();
11980 // return early if no audio data has been observed
11981 if (adtsFrames.length === 0) {
11982 this.trigger('done', 'AudioSegmentStream');
11983 return;
11984 }
11985
11986 lastMetaPts = -Infinity;
11987
11988 while (adtsFrames.length) {
11989 currentFrame = adtsFrames.shift();
11990
11991 // write out a metadata frame at every video key frame
11992 if (videoKeyFrames.length && currentFrame.pts >= videoKeyFrames[0]) {
11993 lastMetaPts = videoKeyFrames.shift();
11994 this.writeMetaDataTags(tags, lastMetaPts);
11995 }
11996
11997 // also write out metadata tags whenever the audio configuration
11998 // changes, and at least once per second, so that the decoder is
11999 // re-initialized quickly after seeking
12000 if (track.extraData !== oldExtraData || currentFrame.pts - lastMetaPts >= 1000) {
12001 this.writeMetaDataTags(tags, currentFrame.pts);
12002 oldExtraData = track.extraData;
12003 lastMetaPts = currentFrame.pts;
12004 }
12005
12006 adtsFrame = new FlvTag(FlvTag.AUDIO_TAG);
12007 adtsFrame.pts = currentFrame.pts;
12008 adtsFrame.dts = currentFrame.dts;
12009
12010 adtsFrame.writeBytes(currentFrame.data);
12011
12012 tags.push(adtsFrame.finalize());
12013 }
12014
12015 videoKeyFrames.length = 0;
12016 oldExtraData = null;
12017 this.trigger('data', {track: track, tags: tags.list});
12018
12019 this.trigger('done', 'AudioSegmentStream');
12020 };
12021
12022 this.writeMetaDataTags = function(tags, pts) {
12023 var adtsFrame;
12024
12025 adtsFrame = new FlvTag(FlvTag.METADATA_TAG);
12026 // For audio, DTS is always the same as PTS. We still set the DTS,
12027 // however, so we can compare it with the video DTS to determine
12028 // approximate packet order
12029 adtsFrame.pts = pts;
12030 adtsFrame.dts = pts;
12031
12032 // AAC is always 10
12033 adtsFrame.writeMetaDataDouble('audiocodecid', 10);
12034 adtsFrame.writeMetaDataBoolean('stereo', track.channelcount === 2);
12035 adtsFrame.writeMetaDataDouble('audiosamplerate', track.samplerate);
12036 // Is AAC always 16 bit?
12037 adtsFrame.writeMetaDataDouble('audiosamplesize', 16);
12038
12039 tags.push(adtsFrame.finalize());
12040
12041 adtsFrame = new FlvTag(FlvTag.AUDIO_TAG, true);
12042 // For audio, DTS is always the same as PTS. We still set the DTS,
12043 // however, so we can compare it with the video DTS to determine
12044 // approximate packet order
12045 adtsFrame.pts = pts;
12046 adtsFrame.dts = pts;
12047
12048 adtsFrame.view.setUint16(adtsFrame.position, track.extraData);
12049 adtsFrame.position += 2;
12050 adtsFrame.length = Math.max(adtsFrame.length, adtsFrame.position);
12051
12052 tags.push(adtsFrame.finalize());
12053 };
12054
12055 this.onVideoKeyFrame = function(pts) {
12056 videoKeyFrames.push(pts);
12057 };
12058};
12059AudioSegmentStream.prototype = new Stream();
12060
12061/**
12062 * Store FlvTags for the h264 stream
12063 * @param track {object} track metadata configuration
12064 */
12065VideoSegmentStream = function(track) {
12066 var
12067 nalUnits = [],
12068 config,
12069 h264Frame;
12070 VideoSegmentStream.prototype.init.call(this);
12071
12072 this.finishFrame = function(tags, frame) {
12073 if (!frame) {
12074 return;
12075 }
12076 // Check whether this is a key frame or the first frame of the segment
12077 // (tags is still empty); either way, metadata must be written out first.
12078 if (config && track && track.newMetadata &&
12079 (frame.keyFrame || tags.length === 0)) {
12080 // Push extra data on every IDR frame in case we did a stream change + seek
12081 var metaTag = metaDataTag(config, frame.dts).finalize();
12082 var extraTag = extraDataTag(track, frame.dts).finalize();
12083
12084 metaTag.metaDataTag = extraTag.metaDataTag = true;
12085
12086 tags.push(metaTag);
12087 tags.push(extraTag);
12088 track.newMetadata = false;
12089
12090 this.trigger('keyframe', frame.dts);
12091 }
12092
12093 frame.endNalUnit();
12094 tags.push(frame.finalize());
12095 h264Frame = null;
12096 };
12097
12098 this.push = function(data) {
12099 collectTimelineInfo(track, data);
12100
12101 data.pts = Math.round(data.pts / 90);
12102 data.dts = Math.round(data.dts / 90);
12103
12104 // buffer video until flush() is called
12105 nalUnits.push(data);
12106 };
12107
12108 this.flush = function() {
12109 var
12110 currentNal,
12111 tags = new TagList();
12112
12113 // Throw away nalUnits at the start of the byte stream until we find
12114 // the first AUD
12115 while (nalUnits.length) {
12116 if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
12117 break;
12118 }
12119 nalUnits.shift();
12120 }
12121
12122 // return early if no video data has been observed
12123 if (nalUnits.length === 0) {
12124 this.trigger('done', 'VideoSegmentStream');
12125 return;
12126 }
12127
12128 while (nalUnits.length) {
12129 currentNal = nalUnits.shift();
12130
12131 // record the track config
12132 if (currentNal.nalUnitType === 'seq_parameter_set_rbsp') {
12133 track.newMetadata = true;
12134 config = currentNal.config;
12135 track.width = config.width;
12136 track.height = config.height;
12137 track.sps = [currentNal.data];
12138 track.profileIdc = config.profileIdc;
12139 track.levelIdc = config.levelIdc;
12140 track.profileCompatibility = config.profileCompatibility;
12141 h264Frame.endNalUnit();
12142 } else if (currentNal.nalUnitType === 'pic_parameter_set_rbsp') {
12143 track.newMetadata = true;
12144 track.pps = [currentNal.data];
12145 h264Frame.endNalUnit();
12146 } else if (currentNal.nalUnitType === 'access_unit_delimiter_rbsp') {
12147 if (h264Frame) {
12148 this.finishFrame(tags, h264Frame);
12149 }
12150 h264Frame = new FlvTag(FlvTag.VIDEO_TAG);
12151 h264Frame.pts = currentNal.pts;
12152 h264Frame.dts = currentNal.dts;
12153 } else {
12154 if (currentNal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
12155 // the current sample is a key frame
12156 h264Frame.keyFrame = true;
12157 }
12158 h264Frame.endNalUnit();
12159 }
12160 h264Frame.startNalUnit();
12161 h264Frame.writeBytes(currentNal.data);
12162 }
12163 if (h264Frame) {
12164 this.finishFrame(tags, h264Frame);
12165 }
12166
12167 this.trigger('data', {track: track, tags: tags.list});
12168
12169 // Continue with the flush process now
12170 this.trigger('done', 'VideoSegmentStream');
12171 };
12172};
12173
12174VideoSegmentStream.prototype = new Stream();
12175
12176/**
12177 * An object that incrementally transmuxes MPEG-2 Transport Stream
12178 * chunks into an FLV.
12179 */
12180Transmuxer = function(options) {
12181 var
12182 self = this,
12183
12184 packetStream, parseStream, elementaryStream,
12185 videoTimestampRolloverStream, audioTimestampRolloverStream,
12186 timedMetadataTimestampRolloverStream,
12187 adtsStream, h264Stream,
12188 videoSegmentStream, audioSegmentStream, captionStream,
12189 coalesceStream;
12190
12191 Transmuxer.prototype.init.call(this);
12192
12193 options = options || {};
12194
12195 // expose the metadata stream
12196 this.metadataStream = new m2ts.MetadataStream();
12197
12198 options.metadataStream = this.metadataStream;
12199
12200 // set up the parsing pipeline
12201 packetStream = new m2ts.TransportPacketStream();
12202 parseStream = new m2ts.TransportParseStream();
12203 elementaryStream = new m2ts.ElementaryStream();
12204 videoTimestampRolloverStream = new m2ts.TimestampRolloverStream('video');
12205 audioTimestampRolloverStream = new m2ts.TimestampRolloverStream('audio');
12206 timedMetadataTimestampRolloverStream = new m2ts.TimestampRolloverStream('timed-metadata');
12207
12208 adtsStream = new AdtsStream();
12209 h264Stream = new H264Stream();
12210 coalesceStream = new CoalesceStream(options);
12211
12212 // disassemble MPEG2-TS packets into elementary streams
12213 packetStream
12214 .pipe(parseStream)
12215 .pipe(elementaryStream);
12216
12217 // !!THIS ORDER IS IMPORTANT!!
12218 // demux the streams
12219 elementaryStream
12220 .pipe(videoTimestampRolloverStream)
12221 .pipe(h264Stream);
12222 elementaryStream
12223 .pipe(audioTimestampRolloverStream)
12224 .pipe(adtsStream);
12225
12226 elementaryStream
12227 .pipe(timedMetadataTimestampRolloverStream)
12228 .pipe(this.metadataStream)
12229 .pipe(coalesceStream);
12230 // hook up a caption stream to extract CEA-708 data from the h264 stream
12231 captionStream = new m2ts.CaptionStream();
12232 h264Stream.pipe(captionStream)
12233 .pipe(coalesceStream);
12234
12235 // hook up the segment streams once track metadata is delivered
12236 elementaryStream.on('data', function(data) {
12237 var i, videoTrack, audioTrack;
12238
12239 if (data.type === 'metadata') {
12240 i = data.tracks.length;
12241
12242 // scan the tracks listed in the metadata
12243 while (i--) {
12244 if (data.tracks[i].type === 'video') {
12245 videoTrack = data.tracks[i];
12246 } else if (data.tracks[i].type === 'audio') {
12247 audioTrack = data.tracks[i];
12248 }
12249 }
12250
12251 // hook up the video segment stream to the first track with h264 data
12252 if (videoTrack && !videoSegmentStream) {
12253 coalesceStream.numberOfTracks++;
12254 videoSegmentStream = new VideoSegmentStream(videoTrack);
12255
12256 // Set up the final part of the video pipeline
12257 h264Stream
12258 .pipe(videoSegmentStream)
12259 .pipe(coalesceStream);
12260 }
12261
12262 if (audioTrack && !audioSegmentStream) {
12263 // hook up the audio segment stream to the first track with aac data
12264 coalesceStream.numberOfTracks++;
12265 audioSegmentStream = new AudioSegmentStream(audioTrack);
12266
12267 // Set up the final part of the audio pipeline
12268 adtsStream
12269 .pipe(audioSegmentStream)
12270 .pipe(coalesceStream);
12271
12272 if (videoSegmentStream) {
12273 videoSegmentStream.on('keyframe', audioSegmentStream.onVideoKeyFrame);
12274 }
12275 }
12276 }
12277 });
12278
12279 // feed incoming data to the front of the parsing pipeline
12280 this.push = function(data) {
12281 packetStream.push(data);
12282 };
12283
12284 // flush any buffered data
12285 this.flush = function() {
12286 // Start at the top of the pipeline and flush all pending work
12287 packetStream.flush();
12288 };
12289
12290 // Caption data has to be reset when seeking outside buffered range
12291 this.resetCaptions = function() {
12292 captionStream.reset();
12293 };
12294
12295 // Re-emit any data coming from the coalesce stream to the outside world
12296 coalesceStream.on('data', function(event) {
12297 self.trigger('data', event);
12298 });
12299
12300 // Let the consumer know we have finished flushing the entire pipeline
12301 coalesceStream.on('done', function() {
12302 self.trigger('done');
12303 });
12304};
12305Transmuxer.prototype = new Stream();
12306
12307// forward compatibility
12308module.exports = Transmuxer;
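
// A hypothetical usage sketch (not part of the library): transmux raw
// MPEG-2 TS bytes into FLV tags. The exact shape of the 'data' event is
// determined by CoalesceStream (see ./coalesce-stream.js); tsBytes is an
// assumed Uint8Array of transport stream data.
//
//   var muxer = new Transmuxer();
//   muxer.on('data', function(event) {
//     // event carries the finalized FLV tags for the flushed segment
//   });
//   muxer.push(tsBytes);
//   muxer.flush(); // drain the pipeline; 'data' then 'done' will fire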
12309
12310},{"../codecs/adts.js":40,"../codecs/h264":41,"../m2ts/m2ts.js":50,"../utils/stream.js":62,"./coalesce-stream.js":43,"./flv-tag.js":45,"./tag-list.js":47}],49:[function(require,module,exports){
12311/**
12312 * mux.js
12313 *
12314 * Copyright (c) 2015 Brightcove
12315 * All rights reserved.
12316 *
12317 * Reads in-band caption information from a video elementary
12318 * stream. Captions must follow the CEA-708 standard for injection
12319 * into MPEG-2 transport streams.
12320 * @see https://en.wikipedia.org/wiki/CEA-708
12321 * @see https://www.gpo.gov/fdsys/pkg/CFR-2007-title47-vol1/pdf/CFR-2007-title47-vol1-sec15-119.pdf
12322 */
12323
12324'use strict';
12325
12326// -----------------
12327// Link To Transport
12328// -----------------
12329
12330// Supplemental enhancement information (SEI) NAL units have a
12331// payload type field to indicate how they are to be
12332// interpreted. CEA-708 caption content is always transmitted with
12333// payload type 0x04.
12334var USER_DATA_REGISTERED_ITU_T_T35 = 4,
12335 RBSP_TRAILING_BITS = 128,
12336 Stream = require('../utils/stream');
12337
12338/**
12339 * Parse a supplemental enhancement information (SEI) NAL unit.
12340 * Stops parsing once a message of type ITU T T35 has been found.
12341 *
12342 * @param bytes {Uint8Array} the bytes of a SEI NAL unit
12343 * @return {object} the parsed SEI payload
12344 * @see Rec. ITU-T H.264, 7.3.2.3.1
12345 */
12346var parseSei = function(bytes) {
12347 var
12348 i = 0,
12349 result = {
12350 payloadType: -1,
12351 payloadSize: 0
12352 },
12353 payloadType = 0,
12354 payloadSize = 0;
12355
12356 // go through the sei_rbsp parsing each individual sei_message
12357 while (i < bytes.byteLength) {
12358 // stop once we have hit the end of the sei_rbsp
12359 if (bytes[i] === RBSP_TRAILING_BITS) {
12360 break;
12361 }
12362
12363 // Parse payload type
12364 while (bytes[i] === 0xFF) {
12365 payloadType += 255;
12366 i++;
12367 }
12368 payloadType += bytes[i++];
12369
12370 // Parse payload size
12371 while (bytes[i] === 0xFF) {
12372 payloadSize += 255;
12373 i++;
12374 }
12375 payloadSize += bytes[i++];
12376
12377 // this sei_message is a 608/708 caption, so save it and break;
12378 // there can only ever be one caption message in a frame's SEI
12379 if (!result.payload && payloadType === USER_DATA_REGISTERED_ITU_T_T35) {
12380 result.payloadType = payloadType;
12381 result.payloadSize = payloadSize;
12382 result.payload = bytes.subarray(i, i + payloadSize);
12383 break;
12384 }
12385
12386 // skip the payload and parse the next message
12387 i += payloadSize;
12388 payloadType = 0;
12389 payloadSize = 0;
12390 }
12391
12392 return result;
12393};
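
// A worked example of the sei_message layout parsed above (hypothetical
// bytes): [0x04, 0x20, ...32 payload bytes..., 0x80], where 0x04 is
// payload_type USER_DATA_REGISTERED_ITU_T_T35, 0x20 is the payload size,
// and 0x80 is the rbsp trailing bits. Types or sizes of 255 and over are
// spread across leading 0xFF bytes plus a final byte, which is why the
// loops above add 255 for every 0xFF encountered.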
12394
12395// see ANSI/SCTE 128-1 (2013), section 8.1
12396var parseUserData = function(sei) {
12397 // itu_t_t35_country_code must be 181 (United States) for
12398 // captions
12399 if (sei.payload[0] !== 181) {
12400 return null;
12401 }
12402
12403 // itu_t_t35_provider_code should be 49 (ATSC) for captions
12404 if (((sei.payload[1] << 8) | sei.payload[2]) !== 49) {
12405 return null;
12406 }
12407
12408 // the user_identifier should be "GA94" to indicate ATSC1 data
12409 if (String.fromCharCode(sei.payload[3],
12410 sei.payload[4],
12411 sei.payload[5],
12412 sei.payload[6]) !== 'GA94') {
12413 return null;
12414 }
12415
12416 // finally, user_data_type_code should be 0x03 for caption data
12417 if (sei.payload[7] !== 0x03) {
12418 return null;
12419 }
12420
12421 // return the user_data_type_structure and strip the trailing
12422 // marker bits
12423 return sei.payload.subarray(8, sei.payload.length - 1);
12424};
12425
12426// see CEA-708-D, section 4.4
12427var parseCaptionPackets = function(pts, userData) {
12428 var results = [], i, count, offset, data;
12429
12430 // if this is just filler, return immediately
12431 if (!(userData[0] & 0x40)) {
12432 return results;
12433 }
12434
12435 // parse out the cc_data_1 and cc_data_2 fields
12436 count = userData[0] & 0x1f;
12437 for (i = 0; i < count; i++) {
12438 offset = i * 3;
12439 data = {
12440 type: userData[offset + 2] & 0x03,
12441 pts: pts
12442 };
12443
12444 // capture cc data when cc_valid is 1
12445 if (userData[offset + 2] & 0x04) {
12446 data.ccData = (userData[offset + 3] << 8) | userData[offset + 4];
12447 results.push(data);
12448 }
12449 }
12450 return results;
12451};
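
// A worked example of the cc_data layout parsed above (hypothetical
// bytes): userData[0] = 0x43 sets process_cc_data_flag (0x40) with
// cc_count = 3, and each construct is a (marker|cc_valid|cc_type, data1,
// data2) triple. For instance the triple 0xFC 0x94 0x2C is valid (0x04
// set), type 0 (field-1 CEA-608), with
// ccData = (0x94 << 8) | 0x2C === 0x942c.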
12452
12453var packetDropper = {
12454 push: function() {}
12455};
12456
12457var CaptionStream = function() {
12458
12459 CaptionStream.prototype.init.call(this);
12460
12461 this.captionPackets_ = [];
12462
12463 this.ccStreams_ = [
12464 new Cea608Stream(0, 0), // eslint-disable-line no-use-before-define
12465 new Cea608Stream(0, 1), // eslint-disable-line no-use-before-define
12466 new Cea608Stream(1, 0), // eslint-disable-line no-use-before-define
12467 new Cea608Stream(1, 1) // eslint-disable-line no-use-before-define
12468 ];
12469
12470 this.reset();
12471
12472 // forward data and done events from CCs to this CaptionStream
12473 this.ccStreams_.forEach(function(cc) {
12474 cc.on('data', this.trigger.bind(this, 'data'));
12475 cc.on('done', this.trigger.bind(this, 'done'));
12476 }, this);
12477
12478};
12479
12480CaptionStream.prototype = new Stream();
12481CaptionStream.prototype.push = function(event) {
12482 var sei, userData;
12483
12484 // only examine SEI NALs
12485 if (event.nalUnitType !== 'sei_rbsp') {
12486 return;
12487 }
12488
12489 // parse the sei
12490 sei = parseSei(event.escapedRBSP);
12491
12492 // ignore everything but user_data_registered_itu_t_t35
12493 if (sei.payloadType !== USER_DATA_REGISTERED_ITU_T_T35) {
12494 return;
12495 }
12496
12497 // parse out the user data payload
12498 userData = parseUserData(sei);
12499
12500 // ignore unrecognized userData
12501 if (!userData) {
12502 return;
12503 }
12504
12505 // Sometimes, the same segment # will be downloaded twice. To stop the
12506 // caption data from being processed twice, we track the latest dts we've
12507 // received and ignore everything with a dts before that. However, since
12508 // data for a specific dts can be split across 2 packets on either side of
12509 // a segment boundary, we need to make sure we *don't* ignore the second
12510 // dts packet we receive that has dts === this.latestDts_. And thus, the
12511 // ignoreNextEqualDts_ flag was born.
12512 if (event.dts < this.latestDts_) {
12513 // We've started getting older data, so set the flag.
12514 this.ignoreNextEqualDts_ = true;
12515 return;
12516 } else if ((event.dts === this.latestDts_) && (this.ignoreNextEqualDts_)) {
12517 // We've received the last duplicate packet, time to start processing again
12518 this.ignoreNextEqualDts_ = false;
12519 return;
12520 }
12521
12522 // parse out CC data packets and save them for later
12523 this.captionPackets_ = this.captionPackets_.concat(parseCaptionPackets(event.pts, userData));
12524 this.latestDts_ = event.dts;
12525};
12526
12527CaptionStream.prototype.flush = function() {
12528 // make sure we actually parsed captions before proceeding
12529 if (!this.captionPackets_.length) {
12530 this.ccStreams_.forEach(function(cc) {
12531 cc.flush();
12532 }, this);
12533 return;
12534 }
12535
12536 // In Chrome, the Array#sort function is not stable so add a
12537 // presortIndex that we can use to ensure we get a stable-sort
12538 this.captionPackets_.forEach(function(elem, idx) {
12539 elem.presortIndex = idx;
12540 });
12541
12542 // sort caption byte-pairs based on their PTS values
12543 this.captionPackets_.sort(function(a, b) {
12544 if (a.pts === b.pts) {
12545 return a.presortIndex - b.presortIndex;
12546 }
12547 return a.pts - b.pts;
12548 });
12549
12550 this.captionPackets_.forEach(function(packet) {
12551 if (packet.type < 2) {
12552 // Dispatch packet to the right Cea608Stream
12553 this.dispatchCea608Packet(packet);
12554 }
12555 // this is where an 'else' would go for dispatching packets
12556 // to a theoretical Cea708Stream that handles SERVICEn data
12557 }, this);
12558
12559 this.captionPackets_.length = 0;
12560 this.ccStreams_.forEach(function(cc) {
12561 cc.flush();
12562 }, this);
12563 return;
12564};
12565
12566CaptionStream.prototype.reset = function() {
12567 this.latestDts_ = null;
12568 this.ignoreNextEqualDts_ = false;
12569 this.activeCea608Channel_ = null;
12570 // Since we don't know which channel is active until we get a control
12571 // code that sets it, we start off with CEA608 handlers that just drop
12572 // all the packets.
12573 this.activeCea608Streams_ = [
12574 packetDropper,
12575 packetDropper
12576 ];
12577 this.ccStreams_.forEach(function(ccStream) {
12578 ccStream.reset();
12579 });
12580};
12581
12582CaptionStream.prototype.dispatchCea608Packet = function(packet) {
12583 if (this.setsChannel1Active(packet) && this.activeCea608Channel_ !== 1) {
12584 this.activeCea608Channel_ = 1;
12585 this.activeCea608Streams_ = [this.ccStreams_[0], this.ccStreams_[2]]; // CC1, CC3
12586 } else if (this.setsChannel2Active(packet) && this.activeCea608Channel_ !== 2) {
12587 this.activeCea608Channel_ = 2;
12588 this.activeCea608Streams_ = [this.ccStreams_[1], this.ccStreams_[3]]; // CC2, CC4
12589 }
12590 // If we haven't set the active streams yet, this next call just returns
12591 // immediately.
12592 this.activeCea608Streams_[packet.type].push(packet);
12593};
12594
12595CaptionStream.prototype.setsChannel1Active = function(packet) {
12596 return ((packet.ccData & 0x7800) === 0x1000);
12597};
12598CaptionStream.prototype.setsChannel2Active = function(packet) {
12599 return ((packet.ccData & 0x7800) === 0x1800);
12600};
12601
12602// ----------------------
12603// Session to Application
12604// ----------------------
12605
12606var CHARACTER_TRANSLATION = {
12607 0x2a: 0xe1, // á
12608 0x5c: 0xe9, // é
12609 0x5e: 0xed, // í
12610 0x5f: 0xf3, // ó
12611 0x60: 0xfa, // ú
12612 0x7b: 0xe7, // ç
12613 0x7c: 0xf7, // ÷
12614 0x7d: 0xd1, // Ñ
12615 0x7e: 0xf1, // ñ
12616 0x7f: 0x2588, // █
12617 0x0130: 0xae, // ®
12618 0x0131: 0xb0, // °
12619 0x0132: 0xbd, // ½
12620 0x0133: 0xbf, // ¿
12621 0x0134: 0x2122, // ™
12622 0x0135: 0xa2, // ¢
12623 0x0136: 0xa3, // £
12624 0x0137: 0x266a, // ♪
12625 0x0138: 0xe0, // à
12626 0x0139: 0xa0, // non-breaking space
12627 0x013a: 0xe8, // è
12628 0x013b: 0xe2, // â
12629 0x013c: 0xea, // ê
12630 0x013d: 0xee, // î
12631 0x013e: 0xf4, // ô
12632 0x013f: 0xfb, // û
12633 0x0220: 0xc1, // Á
12634 0x0221: 0xc9, // É
12635 0x0222: 0xd3, // Ó
12636 0x0223: 0xda, // Ú
12637 0x0224: 0xdc, // Ü
12638 0x0225: 0xfc, // ü
12639 0x0226: 0x2018, // ‘
12640 0x0227: 0xa1, // ¡
12641 0x0228: 0x2a, // *
12642 0x0229: 0x27, // '
12643 0x022a: 0x2014, // —
12644 0x022b: 0xa9, // ©
12645 0x022c: 0x2120, // ℠
12646 0x022d: 0x2022, // •
12647 0x022e: 0x201c, // “
12648 0x022f: 0x201d, // ”
12649 0x0230: 0xc0, // À
12650 0x0231: 0xc2, // Â
12651 0x0232: 0xc7, // Ç
12652 0x0233: 0xc8, // È
12653 0x0234: 0xca, // Ê
12654 0x0235: 0xcb, // Ë
12655 0x0236: 0xeb, // ë
12656 0x0237: 0xce, // Î
12657 0x0238: 0xcf, // Ï
12658 0x0239: 0xef, // ï
12659 0x023a: 0xd4, // Ô
12660 0x023b: 0xd9, // Ù
12661 0x023c: 0xf9, // ù
12662 0x023d: 0xdb, // Û
12663 0x023e: 0xab, // «
12664 0x023f: 0xbb, // »
12665 0x0320: 0xc3, // Ã
12666 0x0321: 0xe3, // ã
12667 0x0322: 0xcd, // Í
12668 0x0323: 0xcc, // Ì
12669 0x0324: 0xec, // ì
12670 0x0325: 0xd2, // Ò
12671 0x0326: 0xf2, // ò
12672 0x0327: 0xd5, // Õ
12673 0x0328: 0xf5, // õ
12674 0x0329: 0x7b, // {
12675 0x032a: 0x7d, // }
12676 0x032b: 0x5c, // \
12677 0x032c: 0x5e, // ^
12678 0x032d: 0x5f, // _
12679 0x032e: 0x7c, // |
12680 0x032f: 0x7e, // ~
12681 0x0330: 0xc4, // Ä
12682 0x0331: 0xe4, // ä
12683 0x0332: 0xd6, // Ö
12684 0x0333: 0xf6, // ö
12685 0x0334: 0xdf, // ß
12686 0x0335: 0xa5, // ¥
12687 0x0336: 0xa4, // ¤
12688 0x0337: 0x2502, // │
12689 0x0338: 0xc5, // Å
12690 0x0339: 0xe5, // å
12691 0x033a: 0xd8, // Ø
12692 0x033b: 0xf8, // ø
12693 0x033c: 0x250c, // ┌
12694 0x033d: 0x2510, // ┐
12695 0x033e: 0x2514, // └
12696 0x033f: 0x2518 // ┘
12697};
12698
12699var getCharFromCode = function(code) {
12700 if (code === null) {
12701 return '';
12702 }
12703 code = CHARACTER_TRANSLATION[code] || code;
12704 return String.fromCharCode(code);
12705};
12706
12707// the index of the last row in a CEA-608 display buffer
12708var BOTTOM_ROW = 14;
12709
12710// This array is used for mapping PACs -> row #, since there's no way of
12711// getting it through bit logic.
12712var ROWS = [0x1100, 0x1120, 0x1200, 0x1220, 0x1500, 0x1520, 0x1600, 0x1620,
12713 0x1700, 0x1720, 0x1000, 0x1300, 0x1320, 0x1400, 0x1420];
12714
12715// CEA-608 captions are rendered onto a 32-column by 15-row matrix of
12716// character cells. The "bottom" row is the last element in the outer array.
12717var createDisplayBuffer = function() {
12718 var result = [], i = BOTTOM_ROW + 1;
12719 while (i--) {
12720 result.push('');
12721 }
12722 return result;
12723};
12724
12725var Cea608Stream = function(field, dataChannel) {
12726 Cea608Stream.prototype.init.call(this);
12727
12728 this.field_ = field || 0;
12729 this.dataChannel_ = dataChannel || 0;
12730
12731 this.name_ = 'CC' + (((this.field_ << 1) | this.dataChannel_) + 1);
12732
12733 this.setConstants();
12734 this.reset();
12735
12736 this.push = function(packet) {
12737 var data, swap, char0, char1, text;
12738 // remove the parity bits
12739 data = packet.ccData & 0x7f7f;
12740
12741 // ignore duplicate control codes; the spec demands they're sent twice
12742 if (data === this.lastControlCode_) {
12743 this.lastControlCode_ = null;
12744 return;
12745 }
12746
12747 // Store control codes
12748 if ((data & 0xf000) === 0x1000) {
12749 this.lastControlCode_ = data;
12750 } else if (data !== this.PADDING_) {
12751 this.lastControlCode_ = null;
12752 }
12753
12754 char0 = data >>> 8;
12755 char1 = data & 0xff;
12756
12757 if (data === this.PADDING_) {
12758 return;
12759
12760 } else if (data === this.RESUME_CAPTION_LOADING_) {
12761 this.mode_ = 'popOn';
12762
12763 } else if (data === this.END_OF_CAPTION_) {
12764 this.clearFormatting(packet.pts);
12765 // if a caption was being displayed, it's gone now
12766 this.flushDisplayed(packet.pts);
12767
12768 // flip memory
12769 swap = this.displayed_;
12770 this.displayed_ = this.nonDisplayed_;
12771 this.nonDisplayed_ = swap;
12772
12773 // start measuring the time to display the caption
12774 this.startPts_ = packet.pts;
12775
12776 } else if (data === this.ROLL_UP_2_ROWS_) {
12777 this.topRow_ = BOTTOM_ROW - 1;
12778 this.mode_ = 'rollUp';
12779 } else if (data === this.ROLL_UP_3_ROWS_) {
12780 this.topRow_ = BOTTOM_ROW - 2;
12781 this.mode_ = 'rollUp';
12782 } else if (data === this.ROLL_UP_4_ROWS_) {
12783 this.topRow_ = BOTTOM_ROW - 3;
12784 this.mode_ = 'rollUp';
12785 } else if (data === this.CARRIAGE_RETURN_) {
12786 this.clearFormatting(packet.pts);
12787 this.flushDisplayed(packet.pts);
12788 this.shiftRowsUp_();
12789 this.startPts_ = packet.pts;
12790
12791 } else if (data === this.BACKSPACE_) {
12792 if (this.mode_ === 'popOn') {
12793 this.nonDisplayed_[BOTTOM_ROW] = this.nonDisplayed_[BOTTOM_ROW].slice(0, -1);
12794 } else {
12795 this.displayed_[BOTTOM_ROW] = this.displayed_[BOTTOM_ROW].slice(0, -1);
12796 }
12797 } else if (data === this.ERASE_DISPLAYED_MEMORY_) {
12798 this.flushDisplayed(packet.pts);
12799 this.displayed_ = createDisplayBuffer();
12800 } else if (data === this.ERASE_NON_DISPLAYED_MEMORY_) {
12801 this.nonDisplayed_ = createDisplayBuffer();
12802
12803 } else if (data === this.RESUME_DIRECT_CAPTIONING_) {
12804 this.mode_ = 'paintOn';
12805
12806 // Append special characters to caption text
12807 } else if (this.isSpecialCharacter(char0, char1)) {
12808 // Bitmask char0 so that we can apply character transformations
12809 // regardless of field and data channel.
12810 // Then byte-shift to the left and OR with char1 so we can pass the
12811 // entire character code to `getCharFromCode`.
12812 char0 = (char0 & 0x03) << 8;
12813 text = getCharFromCode(char0 | char1);
12814 this[this.mode_](packet.pts, text);
12815 this.column_++;
12816
12817 // Append extended characters to caption text
12818 } else if (this.isExtCharacter(char0, char1)) {
12819 // Extended characters always follow their "non-extended" equivalents.
12820 // i.e. if an "è" is desired, you'll always receive "eè"; non-compliant
12821 // decoders are supposed to drop the "è", while compliant decoders
12822 // backspace the "e" and insert "è".
12823
12824 // Delete the previous character
12825 if (this.mode_ === 'popOn') {
12826 this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
12827 } else {
12828 this.displayed_[BOTTOM_ROW] = this.displayed_[BOTTOM_ROW].slice(0, -1);
12829 }
12830
12831 // Bitmask char0 so that we can apply character transformations
12832 // regardless of field and data channel.
12833 // Then byte-shift to the left and OR with char1 so we can pass the
12834 // entire character code to `getCharFromCode`.
12835 char0 = (char0 & 0x03) << 8;
12836 text = getCharFromCode(char0 | char1);
12837 this[this.mode_](packet.pts, text);
12838 this.column_++;
12839
12840 // Process mid-row codes
12841 } else if (this.isMidRowCode(char0, char1)) {
12842 // Attributes are not additive, so clear all formatting
12843 this.clearFormatting(packet.pts);
12844
12845 // According to the standard, mid-row codes
12846 // should be replaced with spaces, so add one now
12847 this[this.mode_](packet.pts, ' ');
12848 this.column_++;
12849
12850 if ((char1 & 0xe) === 0xe) {
12851 this.addFormatting(packet.pts, ['i']);
12852 }
12853
12854 if ((char1 & 0x1) === 0x1) {
12855 this.addFormatting(packet.pts, ['u']);
12856 }
12857
12858 // Detect offset control codes and adjust cursor
12859 } else if (this.isOffsetControlCode(char0, char1)) {
12860 // Cursor position is set by indent PAC (see below) in 4-column
12861 // increments, with an additional offset code of 1-3 to reach any
12862 // of the 32 columns specified by CEA-608. So all we need to do
12863 // here is increment the column cursor by the given offset.
12864 this.column_ += (char1 & 0x03);
12865
12866 // Detect PACs (Preamble Address Codes)
12867 } else if (this.isPAC(char0, char1)) {
12868
12869 // There's no logic for PAC -> row mapping, so we have to just
12870 // find the row code in an array and use its index :(
12871 var row = ROWS.indexOf(data & 0x1f20);
12872
12873 if (row !== this.row_) {
12874 // formatting is only persistent for current row
12875 this.clearFormatting(packet.pts);
12876 this.row_ = row;
12877 }
12878 // All PACs can apply underline, so detect and apply
12879 // (All odd-numbered second bytes set underline)
12880 if ((char1 & 0x1) && (this.formatting_.indexOf('u') === -1)) {
12881 this.addFormatting(packet.pts, ['u']);
12882 }
12883
12884 if ((data & 0x10) === 0x10) {
12885 // We've got an indent level code. Each successive even number
12886 // increments the column cursor by 4, so we can get the desired
12887 // column position by bit-shifting to the right (to get n/2)
12888 // and multiplying by 4.
12889 this.column_ = ((data & 0xe) >> 1) * 4;
12890 }
12891
12892 if (this.isColorPAC(char1)) {
12893 // it's a color code, though we only support white, which
12894 // can be either normal or italicized. white italics can be
12895 // either 0x4e or 0x6e depending on the row, so we just
12896 // bitwise-and with 0xe to see if italics should be turned on
12897 if ((char1 & 0xe) === 0xe) {
12898 this.addFormatting(packet.pts, ['i']);
12899 }
12900 }
12901
12902 // We have a normal character in char0, and possibly one in char1
12903 } else if (this.isNormalChar(char0)) {
12904 if (char1 === 0x00) {
12905 char1 = null;
12906 }
12907 text = getCharFromCode(char0);
12908 text += getCharFromCode(char1);
12909 this[this.mode_](packet.pts, text);
12910 this.column_ += text.length;
12911
12912 } // finish data processing
12913
12914 };
12915};
12916Cea608Stream.prototype = new Stream();
12917// Trigger a cue point that captures the current state of the
12918// display buffer
12919Cea608Stream.prototype.flushDisplayed = function(pts) {
12920 var content = this.displayed_
12921 // remove spaces from the start and end of the string
12922 .map(function(row) {
12923 return row.trim();
12924 })
12925 // combine all text rows to display in one cue
12926 .join('\n')
12927 // and remove blank rows from the start and end, but not the middle
12928 .replace(/^\n+|\n+$/g, '');
12929
12930 if (content.length) {
12931 this.trigger('data', {
12932 startPts: this.startPts_,
12933 endPts: pts,
12934 text: content,
12935 stream: this.name_
12936 });
12937 }
12938};
12939
12940/**
12941 * Zero out the data, used for startup and on seek
12942 */
12943Cea608Stream.prototype.reset = function() {
12944 this.mode_ = 'popOn';
12945 // When in roll-up mode, the index of the last row that will
12946 // actually display captions. If a caption is shifted to a row
12947 // with a lower index than this, it is cleared from the display
12948 // buffer
12949 this.topRow_ = 0;
12950 this.startPts_ = 0;
12951 this.displayed_ = createDisplayBuffer();
12952 this.nonDisplayed_ = createDisplayBuffer();
12953 this.lastControlCode_ = null;
12954
12955 // Track row and column for proper line-breaking and spacing
12956 this.column_ = 0;
12957 this.row_ = BOTTOM_ROW;
12958
12959 // This variable holds currently-applied formatting
12960 this.formatting_ = [];
12961};
12962
12963/**
12964 * Sets up control code and related constants for this instance
12965 */
12966Cea608Stream.prototype.setConstants = function() {
12967 // The following attributes have these uses:
12968 // ext_ : char0 for mid-row codes, and the base for extended
12969 // chars (ext_+0, ext_+1, and ext_+2 are char0s for
12970 // extended codes)
12971 // control_: char0 for control codes, except byte-shifted to the
12972 // left so that we can do this.control_ | CONTROL_CODE
12973 // offset_: char0 for tab offset codes
12974 //
12975 // It's also worth noting that control codes, and _only_ control codes,
12976 // differ between field 1 and field 2. Field 2 control codes are always
12977 // their field 1 value plus 1. That's why there's the "| field" on the
12978 // control value.
12979 if (this.dataChannel_ === 0) {
12980 this.BASE_ = 0x10;
12981 this.EXT_ = 0x11;
12982 this.CONTROL_ = (0x14 | this.field_) << 8;
12983 this.OFFSET_ = 0x17;
12984 } else if (this.dataChannel_ === 1) {
12985 this.BASE_ = 0x18;
12986 this.EXT_ = 0x19;
12987 this.CONTROL_ = (0x1c | this.field_) << 8;
12988 this.OFFSET_ = 0x1f;
12989 }
12990
12991 // Constants for the LSByte command codes recognized by Cea608Stream. This
12992 // list is not exhaustive. For a more comprehensive listing and semantics see
12993 // http://www.gpo.gov/fdsys/pkg/CFR-2010-title47-vol1/pdf/CFR-2010-title47-vol1-sec15-119.pdf
12994 // Padding
12995 this.PADDING_ = 0x0000;
12996 // Pop-on Mode
12997 this.RESUME_CAPTION_LOADING_ = this.CONTROL_ | 0x20;
12998 this.END_OF_CAPTION_ = this.CONTROL_ | 0x2f;
12999 // Roll-up Mode
13000 this.ROLL_UP_2_ROWS_ = this.CONTROL_ | 0x25;
13001 this.ROLL_UP_3_ROWS_ = this.CONTROL_ | 0x26;
13002 this.ROLL_UP_4_ROWS_ = this.CONTROL_ | 0x27;
13003 this.CARRIAGE_RETURN_ = this.CONTROL_ | 0x2d;
13004 // paint-on mode (not supported)
13005 this.RESUME_DIRECT_CAPTIONING_ = this.CONTROL_ | 0x29;
13006 // Erasure
13007 this.BACKSPACE_ = this.CONTROL_ | 0x21;
13008 this.ERASE_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2c;
13009 this.ERASE_NON_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2e;
13010};
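
// A worked example of the constants above: for CC1 (field 0, data channel
// 0), CONTROL_ is (0x14 | 0) << 8 === 0x1400, so RESUME_CAPTION_LOADING_
// is 0x1420 and ERASE_DISPLAYED_MEMORY_ is 0x142c. The field-2 streams OR
// in the field bit, yielding 0x15xx (channel 0) and 0x1dxx (channel 1)
// control codes.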
13011
13012/**
13013 * Detects if the 2-byte packet data is a special character
13014 *
13015 * Special characters have a second byte in the range 0x30 to 0x3f,
13016 * with the first byte being 0x11 (for data channel 1) or 0x19 (for
13017 * data channel 2).
13018 *
13019 * @param {Integer} char0 The first byte
13020 * @param {Integer} char1 The second byte
13021 * @return {Boolean} Whether the 2 bytes are a special character
13022 */
13023Cea608Stream.prototype.isSpecialCharacter = function(char0, char1) {
13024 return (char0 === this.EXT_ && char1 >= 0x30 && char1 <= 0x3f);
13025};
13026
13027/**
13028 * Detects if the 2-byte packet data is an extended character
13029 *
13030 * Extended characters have a second byte in the range 0x20 to 0x3f,
13031 * with the first byte being 0x12 or 0x13 (for data channel 1) or
13032 * 0x1a or 0x1b (for data channel 2).
13033 *
13034 * @param {Integer} char0 The first byte
13035 * @param {Integer} char1 The second byte
13036 * @return {Boolean} Whether the 2 bytes are an extended character
13037 */
13038Cea608Stream.prototype.isExtCharacter = function(char0, char1) {
13039 return ((char0 === (this.EXT_ + 1) || char0 === (this.EXT_ + 2)) &&
13040 (char1 >= 0x20 && char1 <= 0x3f));
13041};
13042
13043/**
13044 * Detects if the 2-byte packet is a mid-row code
13045 *
13046 * Mid-row codes have a second byte in the range 0x20 to 0x2f, with
13047 * the first byte being 0x11 (for data channel 1) or 0x19 (for data
13048 * channel 2).
13049 *
13050 * @param {Integer} char0 The first byte
13051 * @param {Integer} char1 The second byte
13052 * @return {Boolean} Whether the 2 bytes are a mid-row code
13053 */
13054Cea608Stream.prototype.isMidRowCode = function(char0, char1) {
13055 return (char0 === this.EXT_ && (char1 >= 0x20 && char1 <= 0x2f));
13056};
13057
13058/**
13059 * Detects if the 2-byte packet is an offset control code
13060 *
13061 * Offset control codes have a second byte in the range 0x21 to 0x23,
13062 * with the first byte being 0x17 (for data channel 1) or 0x1f (for
13063 * data channel 2).
13064 *
13065 * @param {Integer} char0 The first byte
13066 * @param {Integer} char1 The second byte
13067 * @return {Boolean} Whether the 2 bytes are an offset control code
13068 */
13069Cea608Stream.prototype.isOffsetControlCode = function(char0, char1) {
13070 return (char0 === this.OFFSET_ && (char1 >= 0x21 && char1 <= 0x23));
13071};
13072
13073/**
13074 * Detects if the 2-byte packet is a Preamble Address Code
13075 *
13076 * PACs have a first byte in the range 0x10 to 0x17 (for data channel 1)
13077 * or 0x18 to 0x1f (for data channel 2), with the second byte in the
13078 * range 0x40 to 0x7f.
13079 *
13080 * @param {Integer} char0 The first byte
13081 * @param {Integer} char1 The second byte
13082 * @return {Boolean} Whether the 2 bytes are a PAC
13083 */
13084Cea608Stream.prototype.isPAC = function(char0, char1) {
13085 return (char0 >= this.BASE_ && char0 < (this.BASE_ + 8) &&
13086 (char1 >= 0x40 && char1 <= 0x7f));
13087};
13088
13089/**
13090 * Detects if a packet's second byte is in the range of a PAC color code
13091 *
13092 * PAC color codes have a second byte in the range 0x40 to 0x4f, or
13093 * 0x60 to 0x7f.
13094 *
13095 * @param {Integer} char1 The second byte
13096 * @return {Boolean} Whether the byte is a color PAC
13097 */
13098Cea608Stream.prototype.isColorPAC = function(char1) {
13099 return ((char1 >= 0x40 && char1 <= 0x4f) || (char1 >= 0x60 && char1 <= 0x7f));
13100};
13101
13102/**
13103 * Detects if a single byte is in the range of a normal character
13104 *
13105 * Normal text bytes are in the range 0x20 to 0x7f.
13106 *
13107 * @param {Integer} char The byte
13108 * @return {Boolean} Whether the byte is a normal character
13109 */
13110Cea608Stream.prototype.isNormalChar = function(char) {
13111 return (char >= 0x20 && char <= 0x7f);
13112};
13113
13114// Adds the opening HTML tags for the passed formatting to the caption
13115// text, and keeps track of them for later closing
13116Cea608Stream.prototype.addFormatting = function(pts, format) {
13117 this.formatting_ = this.formatting_.concat(format);
13118 var text = format.reduce(function(text, format) {
13119 return text + '<' + format + '>';
13120 }, '');
13121 this[this.mode_](pts, text);
13122};
13123
13124// Adds HTML closing tags for current formatting to caption text and
13125// clears remembered formatting
13126Cea608Stream.prototype.clearFormatting = function(pts) {
13127 if (!this.formatting_.length) {
13128 return;
13129 }
13130 var text = this.formatting_.reverse().reduce(function(text, format) {
13131 return text + '</' + format + '>';
13132 }, '');
13133 this.formatting_ = [];
13134 this[this.mode_](pts, text);
13135};
13136
13137// Mode Implementations
13138Cea608Stream.prototype.popOn = function(pts, text) {
13139 var baseRow = this.nonDisplayed_[this.row_];
13140
13141 // buffer characters
13142 baseRow += text;
13143 this.nonDisplayed_[this.row_] = baseRow;
13144};
13145
13146Cea608Stream.prototype.rollUp = function(pts, text) {
13147 var baseRow = this.displayed_[BOTTOM_ROW];
13148
13149 baseRow += text;
13150 this.displayed_[BOTTOM_ROW] = baseRow;
13151
13152};
13153
13154Cea608Stream.prototype.shiftRowsUp_ = function() {
13155 var i;
13156 // clear out inactive rows
13157 for (i = 0; i < this.topRow_; i++) {
13158 this.displayed_[i] = '';
13159 }
13160 // shift displayed rows up
13161 for (i = this.topRow_; i < BOTTOM_ROW; i++) {
13162 this.displayed_[i] = this.displayed_[i + 1];
13163 }
13164 // clear out the bottom row
13165 this.displayed_[BOTTOM_ROW] = '';
13166};
13167
13168// paintOn mode is not implemented
13169Cea608Stream.prototype.paintOn = function() {};
13170
13171// exports
13172module.exports = {
13173 CaptionStream: CaptionStream,
13174 Cea608Stream: Cea608Stream
13175};
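
// A hypothetical usage sketch (not part of the library): feed parsed SEI
// NAL events into a CaptionStream and collect caption cues; seiBytes, pts
// and dts are assumed inputs from an upstream h264 parser.
//
//   var captionStream = new CaptionStream();
//   captionStream.on('data', function(cue) {
//     // cue has startPts, endPts, text and stream ('CC1' through 'CC4')
//   });
//   captionStream.push({nalUnitType: 'sei_rbsp', escapedRBSP: seiBytes,
//                       pts: pts, dts: dts});
//   captionStream.flush();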
13176
13177},{"../utils/stream":62}],50:[function(require,module,exports){
13178/**
13179 * mux.js
13180 *
13181 * Copyright (c) 2015 Brightcove
13182 * All rights reserved.
13183 *
13184 * A stream-based mp2t to mp4 converter. This utility can be used to
13185 * deliver mp4s to a SourceBuffer on platforms that support native
13186 * Media Source Extensions.
13187 */
13188'use strict';
13189var Stream = require('../utils/stream.js'),
13190 CaptionStream = require('./caption-stream'),
13191 StreamTypes = require('./stream-types'),
13192 TimestampRolloverStream = require('./timestamp-rollover-stream').TimestampRolloverStream;
13193
13194var m2tsStreamTypes = require('./stream-types.js');
13195
13196// object types
13197var TransportPacketStream, TransportParseStream, ElementaryStream;
13198
13199// constants
13200var
13201 MP2T_PACKET_LENGTH = 188, // bytes
13202 SYNC_BYTE = 0x47;
13203
13204/**
13205 * Splits an incoming stream of binary data into MPEG-2 Transport
13206 * Stream packets.
13207 */
13208TransportPacketStream = function() {
13209 var
13210 buffer = new Uint8Array(MP2T_PACKET_LENGTH),
13211 bytesInBuffer = 0;
13212
13213 TransportPacketStream.prototype.init.call(this);
13214
13215 // Deliver new bytes to the stream.
13216
13217 this.push = function(bytes) {
13218 var
13219 startIndex = 0,
13220 endIndex = MP2T_PACKET_LENGTH,
13221 everything;
13222
13223 // If there are bytes remaining from the last segment, prepend them to the
13224 // bytes that were pushed in
13225 if (bytesInBuffer) {
13226 everything = new Uint8Array(bytes.byteLength + bytesInBuffer);
13227 everything.set(buffer.subarray(0, bytesInBuffer));
13228 everything.set(bytes, bytesInBuffer);
13229 bytesInBuffer = 0;
13230 } else {
13231 everything = bytes;
13232 }
13233
13234 // While we have enough data for a packet
13235 while (endIndex < everything.byteLength) {
13236 // Look for a pair of start and end sync bytes in the data
13237 if (everything[startIndex] === SYNC_BYTE && everything[endIndex] === SYNC_BYTE) {
13238 // We found a packet so emit it and jump one whole packet forward in
13239 // the stream
13240 this.trigger('data', everything.subarray(startIndex, endIndex));
13241 startIndex += MP2T_PACKET_LENGTH;
13242 endIndex += MP2T_PACKET_LENGTH;
13243 continue;
13244 }
13245 // If we get here, we have somehow become de-synchronized and we need to step
13246 // forward one byte at a time until we find a pair of sync bytes that denote
13247 // a packet
13248 startIndex++;
13249 endIndex++;
13250 }
13251
13252 // If there was some data left over at the end of the segment that couldn't
13253 // possibly be a whole packet, keep it because it might be the start of a packet
13254 // that continues in the next segment
13255 if (startIndex < everything.byteLength) {
13256 buffer.set(everything.subarray(startIndex), 0);
13257 bytesInBuffer = everything.byteLength - startIndex;
13258 }
13259 };
13260
13261 this.flush = function() {
13262 // If the buffer contains a whole packet when we are being flushed, emit it
13263 // and empty the buffer. Otherwise hold onto the data because it may be
13264 // important for decoding the next segment
13265 if (bytesInBuffer === MP2T_PACKET_LENGTH && buffer[0] === SYNC_BYTE) {
13266 this.trigger('data', buffer);
13267 bytesInBuffer = 0;
13268 }
13269 this.trigger('done');
13270 };
13271};
13272TransportPacketStream.prototype = new Stream();
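// A minimal, standalone sketch of the splitting logic above.
// `splitMp2tPackets` is a hypothetical helper, not part of this module's
// API: it collects whole 188-byte packets that both begin and end on a
// 0x47 sync byte and, like the stream above, leaves a trailing partial
// packet unconsumed.
var splitMp2tPackets = function(bytes) {
  var packets = [], startIndex = 0, endIndex = MP2T_PACKET_LENGTH;
  while (endIndex < bytes.byteLength) {
    if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
      packets.push(bytes.subarray(startIndex, endIndex));
      startIndex += MP2T_PACKET_LENGTH;
      endIndex += MP2T_PACKET_LENGTH;
    } else {
      // de-synchronized; resync one byte at a time
      startIndex++;
      endIndex++;
    }
  }
  return packets;
};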
13273
13274/**
13275 * Accepts an MP2T TransportPacketStream and emits data events with parsed
13276 * forms of the individual transport stream packets.
13277 */
13278TransportParseStream = function() {
13279 var parsePsi, parsePat, parsePmt, self;
13280 TransportParseStream.prototype.init.call(this);
13281 self = this;
13282
13283 this.packetsWaitingForPmt = [];
13284 this.programMapTable = undefined;
13285
13286 parsePsi = function(payload, psi) {
13287 var offset = 0;
13288
13289 // PSI packets may be split into multiple sections and those
13290 // sections may be split into multiple packets. If a PSI
13291 // section starts in this packet, the payload_unit_start_indicator
13292 // will be true and the first byte of the payload will indicate
13293 // the offset from the current position to the start of the
13294 // section.
13295 if (psi.payloadUnitStartIndicator) {
13296 offset += payload[offset] + 1;
13297 }
13298
13299 if (psi.type === 'pat') {
13300 parsePat(payload.subarray(offset), psi);
13301 } else {
13302 parsePmt(payload.subarray(offset), psi);
13303 }
13304 };
13305
13306 parsePat = function(payload, pat) {
13307 pat.section_number = payload[7]; // eslint-disable-line camelcase
13308 pat.last_section_number = payload[8]; // eslint-disable-line camelcase
13309
13310 // skip the PSI header and parse the first PMT entry
13311 self.pmtPid = (payload[10] & 0x1F) << 8 | payload[11];
13312 pat.pmtPid = self.pmtPid;
13313 };
13314
13315 /**
13316 * Parse out the relevant fields of a Program Map Table (PMT).
13317 * @param payload {Uint8Array} the PMT-specific portion of an MP2T
13318 * packet. The first byte in this array should be the table_id
13319 * field.
13320 * @param pmt {object} the object that should be decorated with
13321 * fields parsed from the PMT.
13322 */
13323 parsePmt = function(payload, pmt) {
13324 var sectionLength, tableEnd, programInfoLength, offset;
13325
13326 // PMTs can be sent ahead of the time when they should actually
13327 // take effect. We don't believe this should ever be the case
13328 // for HLS but we'll ignore "forward" PMT declarations if we see
13329 // them. Future PMT declarations have the current_next_indicator
13330 // set to zero.
13331 if (!(payload[5] & 0x01)) {
13332 return;
13333 }
13334
13335 // overwrite any existing program map table
13336 self.programMapTable = {
13337 video: null,
13338 audio: null,
13339 'timed-metadata': {}
13340 };
13341
13342 // the mapping table ends at the end of the current section
13343 sectionLength = (payload[1] & 0x0f) << 8 | payload[2];
13344 tableEnd = 3 + sectionLength - 4;
13345
13346 // to determine where the table is, we have to figure out how
13347 // long the program info descriptors are
13348 programInfoLength = (payload[10] & 0x0f) << 8 | payload[11];
13349
13350 // advance the offset to the first entry in the mapping table
13351 offset = 12 + programInfoLength;
13352 while (offset < tableEnd) {
13353 var streamType = payload[offset];
13354 var pid = (payload[offset + 1] & 0x1F) << 8 | payload[offset + 2];
13355
13356 // only map a single elementary_pid for audio and video stream types
13357 // TODO: should this be done for metadata too? for now maintain behavior of
13358 // multiple metadata streams
13359 if (streamType === StreamTypes.H264_STREAM_TYPE &&
13360 self.programMapTable.video === null) {
13361 self.programMapTable.video = pid;
13362 } else if (streamType === StreamTypes.ADTS_STREAM_TYPE &&
13363 self.programMapTable.audio === null) {
13364 self.programMapTable.audio = pid;
13365 } else if (streamType === StreamTypes.METADATA_STREAM_TYPE) {
13366 // map pid to stream type for metadata streams
13367 self.programMapTable['timed-metadata'][pid] = streamType;
13368 }
13369
13370 // move to the next table entry
13371 // skip past the elementary stream descriptors, if present
13372 offset += ((payload[offset + 3] & 0x0F) << 8 | payload[offset + 4]) + 5;
13373 }
13374
13375 // record the map on the packet as well
13376 pmt.programMapTable = self.programMapTable;
13377 };
13378
13379 /**
13380 * Deliver a new MP2T packet to the stream.
13381 */
13382 this.push = function(packet) {
13383 var
13384 result = {},
13385 offset = 4;
13386
13387 result.payloadUnitStartIndicator = !!(packet[1] & 0x40);
13388
13389 // pid is a 13-bit field starting at the last bit of packet[1]
13390 result.pid = packet[1] & 0x1f;
13391 result.pid <<= 8;
13392 result.pid |= packet[2];
13393
13394 // if an adaptation field is present, its length is specified by the
13395 // fifth byte of the TS packet header. The adaptation field is
13396 // used to add stuffing to PES packets that don't fill a complete
13397 // TS packet, and to specify some forms of timing and control data
13398 // that we do not currently use.
13399 if (((packet[3] & 0x30) >>> 4) > 0x01) {
13400 offset += packet[offset] + 1;
13401 }
13402
13403 // parse the rest of the packet based on the type
13404 if (result.pid === 0) {
13405 result.type = 'pat';
13406 parsePsi(packet.subarray(offset), result);
13407 this.trigger('data', result);
13408 } else if (result.pid === this.pmtPid) {
13409 result.type = 'pmt';
13410 parsePsi(packet.subarray(offset), result);
13411 this.trigger('data', result);
13412
13413 // if there are any packets waiting for a PMT to be found, process them now
13414 while (this.packetsWaitingForPmt.length) {
13415 this.processPes_.apply(this, this.packetsWaitingForPmt.shift());
13416 }
13417 } else if (this.programMapTable === undefined) {
13418 // When we have not seen a PMT yet, defer further processing of
13419 // PES packets until one has been parsed
13420 this.packetsWaitingForPmt.push([packet, offset, result]);
13421 } else {
13422 this.processPes_(packet, offset, result);
13423 }
13424 };
13425
13426 this.processPes_ = function(packet, offset, result) {
13427 // set the appropriate stream type
13428 if (result.pid === this.programMapTable.video) {
13429 result.streamType = StreamTypes.H264_STREAM_TYPE;
13430 } else if (result.pid === this.programMapTable.audio) {
13431 result.streamType = StreamTypes.ADTS_STREAM_TYPE;
13432 } else {
13433 // if not video or audio, it is timed-metadata or unknown
13434 // if unknown, streamType will be undefined
13435 result.streamType = this.programMapTable['timed-metadata'][result.pid];
13436 }
13437
13438 result.type = 'pes';
13439 result.data = packet.subarray(offset);
13440
13441 this.trigger('data', result);
13442 };
13443
13444};
13445TransportParseStream.prototype = new Stream();
13446TransportParseStream.STREAM_TYPES = {
13447 h264: 0x1b,
13448 adts: 0x0f
13449};
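// A worked example of the header fields parsed in `push` above, assuming
// a hypothetical packet whose first bytes are 0x47 0x41 0x00 0x10: the
// PID is the 13 bits spanning the low five bits of byte 1 plus all of
// byte 2, and bit 0x40 of byte 1 is the payload_unit_start_indicator.
// Both variables are illustrative only.
var examplePid = (0x41 & 0x1f) << 8 | 0x00; // 0x100 (256)
var examplePusi = !!(0x41 & 0x40); // true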
13450
13451/**
13452 * Reconstitutes program elementary stream (PES) packets from parsed
13453 * transport stream packets. That is, if you pipe an
13454 * mp2t.TransportParseStream into a mp2t.ElementaryStream, the output
13455 * events will be events which capture the bytes for individual PES
13456 * packets plus relevant metadata that has been extracted from the
13457 * container.
13458 */
13459ElementaryStream = function() {
13460 var
13461 self = this,
13462 // PES packet fragments
13463 video = {
13464 data: [],
13465 size: 0
13466 },
13467 audio = {
13468 data: [],
13469 size: 0
13470 },
13471 timedMetadata = {
13472 data: [],
13473 size: 0
13474 },
13475 parsePes = function(payload, pes) {
13476 var ptsDtsFlags;
13477
13478 // get the packet length, this will be 0 for video
13479 pes.packetLength = 6 + ((payload[4] << 8) | payload[5]);
13480
13481 // find out if this packet starts a new keyframe
13482 pes.dataAlignmentIndicator = (payload[6] & 0x04) !== 0;
13483 // PES packets may be annotated with a PTS value, or a PTS value
13484 // and a DTS value. Determine what combination of values is
13485 // available to work with.
13486 ptsDtsFlags = payload[7];
13487
13488 // PTS and DTS are normally stored as a 33-bit number. JavaScript
13489 // performs all bitwise operations on 32-bit integers, but it can
13490 // represent a much greater range of integers (up to 53 bits) using
13491 // standard mathematical operations.
13492 // We construct a 31-bit value using bitwise operators over the 31
13493 // most significant bits and then multiply by 4 (equal to a left-shift
13494 // of 2) before we add the final 2 least significant bits of the
13495 // timestamp (equal to an OR.)
13496 if (ptsDtsFlags & 0xC0) {
13497 // the PTS and DTS are not written out directly. For information
13498 // on how they are encoded, see
13499 // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
13500 pes.pts = (payload[9] & 0x0E) << 27 |
13501 (payload[10] & 0xFF) << 20 |
13502 (payload[11] & 0xFE) << 12 |
13503 (payload[12] & 0xFF) << 5 |
13504 (payload[13] & 0xFE) >>> 3;
13505 pes.pts *= 4; // Left shift by 2
13506 pes.pts += (payload[13] & 0x06) >>> 1; // OR by the two LSBs
13507 pes.dts = pes.pts;
13508 if (ptsDtsFlags & 0x40) {
13509 pes.dts = (payload[14] & 0x0E) << 27 |
13510 (payload[15] & 0xFF) << 20 |
13511 (payload[16] & 0xFE) << 12 |
13512 (payload[17] & 0xFF) << 5 |
13513 (payload[18] & 0xFE) >>> 3;
13514 pes.dts *= 4; // Left shift by 2
13515 pes.dts += (payload[18] & 0x06) >>> 1; // OR by the two LSBs
13516 }
13517 }
13518 // the data section starts immediately after the PES header.
13519 // pes_header_data_length specifies the number of header bytes
13520 // that follow the last byte of the field.
13521 pes.data = payload.subarray(9 + payload[8]);
13522 },
13523 flushStream = function(stream, type, forceFlush) {
13524 var
13525 packetData = new Uint8Array(stream.size),
13526 event = {
13527 type: type
13528 },
13529 i = 0,
13530 offset = 0,
13531 packetFlushable = false,
13532 fragment;
13533
13534 // do nothing if there is not enough buffered data for a complete
13535 // PES header
13536 if (!stream.data.length || stream.size < 9) {
13537 return;
13538 }
13539 event.trackId = stream.data[0].pid;
13540
13541 // reassemble the packet
13542 for (i = 0; i < stream.data.length; i++) {
13543 fragment = stream.data[i];
13544
13545 packetData.set(fragment.data, offset);
13546 offset += fragment.data.byteLength;
13547 }
13548
13549 // parse assembled packet's PES header
13550 parsePes(packetData, event);
13551
13552 // non-video PES packets MUST have a non-zero PES_packet_length;
13553 // check that it matches the assembled size before we do a flush
13554 packetFlushable = type === 'video' || event.packetLength === stream.size;
13555
13556 // flush pending packets if the conditions are right
13557 if (forceFlush || packetFlushable) {
13558 stream.size = 0;
13559 stream.data.length = 0;
13560 }
13561
13562 // only emit packets that are complete. this is to avoid assembling
13563 // incomplete PES packets due to poor segmentation
13564 if (packetFlushable) {
13565 self.trigger('data', event);
13566 }
13567 };
13568
13569 ElementaryStream.prototype.init.call(this);
13570
13571 this.push = function(data) {
13572 ({
13573 pat: function() {
13574 // we have to wait for the PMT to arrive as well before we
13575 // have any meaningful metadata
13576 },
13577 pes: function() {
13578 var stream, streamType;
13579
13580 switch (data.streamType) {
13581 case StreamTypes.H264_STREAM_TYPE:
13582 case m2tsStreamTypes.H264_STREAM_TYPE:
13583 stream = video;
13584 streamType = 'video';
13585 break;
13586 case StreamTypes.ADTS_STREAM_TYPE:
13587 stream = audio;
13588 streamType = 'audio';
13589 break;
13590 case StreamTypes.METADATA_STREAM_TYPE:
13591 stream = timedMetadata;
13592 streamType = 'timed-metadata';
13593 break;
13594 default:
13595 // ignore unknown stream types
13596 return;
13597 }
13598
13599 // if a new packet is starting, we can flush the completed
13600 // packet
13601 if (data.payloadUnitStartIndicator) {
13602 flushStream(stream, streamType, true);
13603 }
13604
13605 // buffer this fragment until we are sure we've received the
13606 // complete payload
13607 stream.data.push(data);
13608 stream.size += data.data.byteLength;
13609 },
13610 pmt: function() {
13611 var
13612 event = {
13613 type: 'metadata',
13614 tracks: []
13615 },
13616 programMapTable = data.programMapTable;
13617
13618 // translate audio and video streams to tracks
13619 if (programMapTable.video !== null) {
13620 event.tracks.push({
13621 timelineStartInfo: {
13622 baseMediaDecodeTime: 0
13623 },
13624 id: +programMapTable.video,
13625 codec: 'avc',
13626 type: 'video'
13627 });
13628 }
13629 if (programMapTable.audio !== null) {
13630 event.tracks.push({
13631 timelineStartInfo: {
13632 baseMediaDecodeTime: 0
13633 },
13634 id: +programMapTable.audio,
13635 codec: 'adts',
13636 type: 'audio'
13637 });
13638 }
13639
13640 self.trigger('data', event);
13641 }
13642 })[data.type]();
13643 };
13644
13645 /**
13646 * Flush any remaining input. Video PES packets may be of variable
13647 * length. Normally, the start of a new video packet can trigger the
13648 * finalization of the previous packet. That is not possible if no
13649 * more video is forthcoming, however. In that case, some other
13650 * mechanism (like the end of the file) has to be employed. When it is
13651 * clear that no additional data is forthcoming, calling this method
13652 * will flush the buffered packets.
13653 */
13654 this.flush = function() {
13655 // !!THIS ORDER IS IMPORTANT!!
13656 // video first then audio
13657 flushStream(video, 'video');
13658 flushStream(audio, 'audio');
13659 flushStream(timedMetadata, 'timed-metadata');
13660 this.trigger('done');
13661 };
13662};
13663ElementaryStream.prototype = new Stream();
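// A standalone sketch of the 33-bit timestamp reconstruction performed
// by parsePes above. `parsePts33` is a hypothetical helper: it combines
// the upper 31 bits with bitwise operators, then brings in the final two
// bits arithmetically, which is safe because doubles represent integers
// exactly well beyond 33 bits.
var parsePts33 = function(payload, offset) {
  var pts = (payload[offset] & 0x0E) << 27 |
    (payload[offset + 1] & 0xFF) << 20 |
    (payload[offset + 2] & 0xFE) << 12 |
    (payload[offset + 3] & 0xFF) << 5 |
    (payload[offset + 4] & 0xFE) >>> 3;
  pts *= 4; // left shift by 2
  pts += (payload[offset + 4] & 0x06) >>> 1; // OR in the two LSBs
  return pts;
};
// parsePes reads the PTS starting at byte 9 of the PES header, e.g.:
// parsePts33(payload, 9)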
13664
13665var m2ts = {
13666 PAT_PID: 0x0000,
13667 MP2T_PACKET_LENGTH: MP2T_PACKET_LENGTH,
13668 TransportPacketStream: TransportPacketStream,
13669 TransportParseStream: TransportParseStream,
13670 ElementaryStream: ElementaryStream,
13671 TimestampRolloverStream: TimestampRolloverStream,
13672 CaptionStream: CaptionStream.CaptionStream,
13673 Cea608Stream: CaptionStream.Cea608Stream,
13674 MetadataStream: require('./metadata-stream')
13675};
13676
13677for (var type in StreamTypes) {
13678 if (StreamTypes.hasOwnProperty(type)) {
13679 m2ts[type] = StreamTypes[type];
13680 }
13681}
13682
13683module.exports = m2ts;
13684
13685},{"../utils/stream.js":62,"./caption-stream":49,"./metadata-stream":51,"./stream-types":53,"./stream-types.js":53,"./timestamp-rollover-stream":54}],51:[function(require,module,exports){
13686/**
13687 * Accepts program elementary stream (PES) data events and parses out
13688 * ID3 metadata from them, if present.
13689 * @see http://id3.org/id3v2.3.0
13690 */
13691'use strict';
13692var
13693 Stream = require('../utils/stream'),
13694 StreamTypes = require('./stream-types'),
13695 // return a percent-encoded representation of the specified byte range
13696 // @see http://en.wikipedia.org/wiki/Percent-encoding
13697 percentEncode = function(bytes, start, end) {
13698 var i, result = '';
13699 for (i = start; i < end; i++) {
13700 result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
13701 }
13702 return result;
13703 },
13704 // return the string representation of the specified byte range,
13705 // interpreted as UTF-8.
13706 parseUtf8 = function(bytes, start, end) {
13707 return decodeURIComponent(percentEncode(bytes, start, end));
13708 },
13709 // return the string representation of the specified byte range,
13710 // interpreted as ISO-8859-1.
13711 parseIso88591 = function(bytes, start, end) {
13712 return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
13713 },
13714 parseSyncSafeInteger = function(data) {
13715 return (data[0] << 21) |
13716 (data[1] << 14) |
13717 (data[2] << 7) |
13718 (data[3]);
13719 },
13720 tagParsers = {
13721 TXXX: function(tag) {
13722 var i;
13723 if (tag.data[0] !== 3) {
13724 // ignore frames with unrecognized character encodings
13725 return;
13726 }
13727
13728 for (i = 1; i < tag.data.length; i++) {
13729 if (tag.data[i] === 0) {
13730 // parse the text fields
13731 tag.description = parseUtf8(tag.data, 1, i);
13732 // do not include the null terminator in the tag value
13733 tag.value = parseUtf8(tag.data, i + 1, tag.data.length).replace(/\0*$/, '');
13734 break;
13735 }
13736 }
13737 tag.data = tag.value;
13738 },
13739 WXXX: function(tag) {
13740 var i;
13741 if (tag.data[0] !== 3) {
13742 // ignore frames with unrecognized character encodings
13743 return;
13744 }
13745
13746 for (i = 1; i < tag.data.length; i++) {
13747 if (tag.data[i] === 0) {
13748 // parse the description and URL fields
13749 tag.description = parseUtf8(tag.data, 1, i);
13750 tag.url = parseUtf8(tag.data, i + 1, tag.data.length);
13751 break;
13752 }
13753 }
13754 },
13755 PRIV: function(tag) {
13756 var i;
13757
13758 for (i = 0; i < tag.data.length; i++) {
13759 if (tag.data[i] === 0) {
13760 // parse the owner identifier, up to the first null byte
13761 tag.owner = parseIso88591(tag.data, 0, i);
13762 break;
13763 }
13764 }
13765 tag.privateData = tag.data.subarray(i + 1);
13766 tag.data = tag.privateData;
13767 }
13768 },
13769 MetadataStream;
13770
13771MetadataStream = function(options) {
13772 var
13773 settings = {
13774 debug: !!(options && options.debug),
13775
13776 // the bytes of the program-level descriptor field in MP2T
13777 // see ISO/IEC 13818-1:2013 (E), section 2.6 "Program and
13778 // program element descriptors"
13779 descriptor: options && options.descriptor
13780 },
13781 // the total size in bytes of the ID3 tag being parsed
13782 tagSize = 0,
13783 // tag data that is not complete enough to be parsed
13784 buffer = [],
13785 // the total number of bytes currently in the buffer
13786 bufferSize = 0,
13787 i;
13788
13789 MetadataStream.prototype.init.call(this);
13790
13791 // calculate the text track in-band metadata track dispatch type
13792 // https://html.spec.whatwg.org/multipage/embedded-content.html#steps-to-expose-a-media-resource-specific-text-track
13793 this.dispatchType = StreamTypes.METADATA_STREAM_TYPE.toString(16);
13794 if (settings.descriptor) {
13795 for (i = 0; i < settings.descriptor.length; i++) {
13796 this.dispatchType += ('00' + settings.descriptor[i].toString(16)).slice(-2);
13797 }
13798 }
13799
13800 this.push = function(chunk) {
13801 var tag, frameStart, frameSize, frame, i, frameHeader;
13802 if (chunk.type !== 'timed-metadata') {
13803 return;
13804 }
13805
13806 // if data_alignment_indicator is set in the PES header,
13807 // we must have the start of a new ID3 tag. Assume anything
13808 // remaining in the buffer was malformed and throw it out
13809 if (chunk.dataAlignmentIndicator) {
13810 bufferSize = 0;
13811 buffer.length = 0;
13812 }
13813
13814 // ignore events that don't look like ID3 data
13815 if (buffer.length === 0 &&
13816 (chunk.data.length < 10 ||
13817 chunk.data[0] !== 'I'.charCodeAt(0) ||
13818 chunk.data[1] !== 'D'.charCodeAt(0) ||
13819 chunk.data[2] !== '3'.charCodeAt(0))) {
13820 if (settings.debug) {
13821 // eslint-disable-next-line no-console
13822 console.log('Skipping unrecognized metadata packet');
13823 }
13824 return;
13825 }
13826
13827 // add this chunk to the data we've collected so far
13828
13829 buffer.push(chunk);
13830 bufferSize += chunk.data.byteLength;
13831
13832 // grab the size of the entire frame from the ID3 header
13833 if (buffer.length === 1) {
13834 // the frame size is transmitted as a 28-bit integer in the
13835 // last four bytes of the ID3 header.
13836 // The most significant bit of each byte is dropped and the
13837 // results concatenated to recover the actual value.
13838 tagSize = parseSyncSafeInteger(chunk.data.subarray(6, 10));
13839
13840 // ID3 reports the tag size excluding the header but it's more
13841 // convenient for our comparisons to include it
13842 tagSize += 10;
13843 }
13844
13845 // if the entire frame has not arrived, wait for more data
13846 if (bufferSize < tagSize) {
13847 return;
13848 }
13849
13850 // collect the entire frame so it can be parsed
13851 tag = {
13852 data: new Uint8Array(tagSize),
13853 frames: [],
13854 pts: buffer[0].pts,
13855 dts: buffer[0].dts
13856 };
13857 for (i = 0; i < tagSize;) {
13858 tag.data.set(buffer[0].data.subarray(0, tagSize - i), i);
13859 i += buffer[0].data.byteLength;
13860 bufferSize -= buffer[0].data.byteLength;
13861 buffer.shift();
13862 }
13863
13864 // find the start of the first frame and the end of the tag
13865 frameStart = 10;
13866 if (tag.data[5] & 0x40) {
13867 // advance the frame start past the extended header
13868 frameStart += 4; // header size field
13869 frameStart += parseSyncSafeInteger(tag.data.subarray(10, 14));
13870
13871 // clip any padding off the end
13872 tagSize -= parseSyncSafeInteger(tag.data.subarray(16, 20));
13873 }
13874
13875 // parse one or more ID3 frames
13876 // http://id3.org/id3v2.3.0#ID3v2_frame_overview
13877 do {
13878 // determine the number of bytes in this frame
13879 frameSize = parseSyncSafeInteger(tag.data.subarray(frameStart + 4, frameStart + 8));
13880 if (frameSize < 1) {
13881 // eslint-disable-next-line no-console
13882 return console.log('Malformed ID3 frame encountered. Skipping metadata parsing.');
13883 }
13884 frameHeader = String.fromCharCode(tag.data[frameStart],
13885 tag.data[frameStart + 1],
13886 tag.data[frameStart + 2],
13887 tag.data[frameStart + 3]);
13888
13889
13890 frame = {
13891 id: frameHeader,
13892 data: tag.data.subarray(frameStart + 10, frameStart + frameSize + 10)
13893 };
13894 frame.key = frame.id;
13895 if (tagParsers[frame.id]) {
13896 tagParsers[frame.id](frame);
13897
13898 // handle the special PRIV frame used to indicate the start
13899 // time for raw AAC data
13900 if (frame.owner === 'com.apple.streaming.transportStreamTimestamp') {
13901 var
13902 d = frame.data,
13903 size = ((d[3] & 0x01) << 30) |
13904 (d[4] << 22) |
13905 (d[5] << 14) |
13906 (d[6] << 6) |
13907 (d[7] >>> 2);
13908
13909 size *= 4;
13910 size += d[7] & 0x03;
13911 frame.timeStamp = size;
13912 // in raw AAC, all subsequent data will be timestamped based
13913 // on the value of this frame
13914 // we couldn't have known the appropriate pts and dts before
13915 // parsing this ID3 tag so set those values now
13916 if (tag.pts === undefined && tag.dts === undefined) {
13917 tag.pts = frame.timeStamp;
13918 tag.dts = frame.timeStamp;
13919 }
13920 this.trigger('timestamp', frame);
13921 }
13922 }
13923 tag.frames.push(frame);
13924
13925 frameStart += 10; // advance past the frame header
13926 frameStart += frameSize; // advance past the frame body
13927 } while (frameStart < tagSize);
13928 this.trigger('data', tag);
13929 };
13930};
13931MetadataStream.prototype = new Stream();
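// A worked example of the sync-safe integer decoding used above: ID3
// sizes carry 7 bits per byte so the sync byte 0xFF can never appear in
// a size field. The bytes [0x00, 0x00, 0x02, 0x01] therefore decode to
// (2 << 7) | 1 = 257. `exampleTagSize` is illustrative only.
var exampleTagSize = parseSyncSafeInteger([0x00, 0x00, 0x02, 0x01]); // 257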
13932
13933module.exports = MetadataStream;
13934
13935},{"../utils/stream":62,"./stream-types":53}],52:[function(require,module,exports){
13936/**
13937 * mux.js
13938 *
13939 * Copyright (c) 2016 Brightcove
13940 * All rights reserved.
13941 *
13942 * Utilities to detect basic properties and metadata about TS Segments.
13943 */
13944'use strict';
13945
13946var StreamTypes = require('./stream-types.js');
13947
13948var parsePid = function(packet) {
13949 var pid = packet[1] & 0x1f;
13950 pid <<= 8;
13951 pid |= packet[2];
13952 return pid;
13953};
13954
13955var parsePayloadUnitStartIndicator = function(packet) {
13956 return !!(packet[1] & 0x40);
13957};
13958
13959var parseAdaptionField = function(packet) {
13960 var offset = 0;
13961 // if an adaptation field is present, its length is specified by the
13962 // fifth byte of the TS packet header. The adaptation field is
13963 // used to add stuffing to PES packets that don't fill a complete
13964 // TS packet, and to specify some forms of timing and control data
13965 // that we do not currently use.
13966 if (((packet[3] & 0x30) >>> 4) > 0x01) {
13967 offset += packet[4] + 1;
13968 }
13969 return offset;
13970};
13971
13972var parseType = function(packet, pmtPid) {
13973 var pid = parsePid(packet);
13974 if (pid === 0) {
13975 return 'pat';
13976 } else if (pid === pmtPid) {
13977 return 'pmt';
13978 } else if (pmtPid) {
13979 return 'pes';
13980 }
13981 return null;
13982};
13983
13984var parsePat = function(packet) {
13985 var pusi = parsePayloadUnitStartIndicator(packet);
13986 var offset = 4 + parseAdaptionField(packet);
13987
13988 if (pusi) {
13989 offset += packet[offset] + 1;
13990 }
13991
13992 return (packet[offset + 10] & 0x1f) << 8 | packet[offset + 11];
13993};
13994
13995var parsePmt = function(packet) {
13996 var programMapTable = {};
13997 var pusi = parsePayloadUnitStartIndicator(packet);
13998 var payloadOffset = 4 + parseAdaptionField(packet);
13999
14000 if (pusi) {
14001 payloadOffset += packet[payloadOffset] + 1;
14002 }
14003
14004 // PMTs can be sent ahead of the time when they should actually
14005 // take effect. We don't believe this should ever be the case
14006 // for HLS but we'll ignore "forward" PMT declarations if we see
14007 // them. Future PMT declarations have the current_next_indicator
14008 // set to zero.
14009 if (!(packet[payloadOffset + 5] & 0x01)) {
14010 return;
14011 }
14012
14013 var sectionLength, tableEnd, programInfoLength;
14014 // the mapping table ends at the end of the current section
14015 sectionLength = (packet[payloadOffset + 1] & 0x0f) << 8 | packet[payloadOffset + 2];
14016 tableEnd = 3 + sectionLength - 4;
14017
14018 // to determine where the table is, we have to figure out how
14019 // long the program info descriptors are
14020 programInfoLength = (packet[payloadOffset + 10] & 0x0f) << 8 | packet[payloadOffset + 11];
14021
14022 // advance the offset to the first entry in the mapping table
14023 var offset = 12 + programInfoLength;
14024 while (offset < tableEnd) {
14025 var i = payloadOffset + offset;
14026 // add an entry that maps the elementary_pid to the stream_type
14027 programMapTable[(packet[i + 1] & 0x1F) << 8 | packet[i + 2]] = packet[i];
14028
14029 // move to the next table entry
14030 // skip past the elementary stream descriptors, if present
14031 offset += ((packet[i + 3] & 0x0F) << 8 | packet[i + 4]) + 5;
14032 }
14033 return programMapTable;
14034};
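// A sketch of how these probe helpers compose. `probeProgramMap` is a
// hypothetical driver, assuming `packets` is an array of 188-byte TS
// packets (for example, the output of the TransportPacketStream): the
// PAT yields the PMT's PID, and the first applicable PMT yields the
// elementary PID to stream_type map.
var probeProgramMap = function(packets) {
  var pmtPid, table, i;
  for (i = 0; i < packets.length; i++) {
    if (parseType(packets[i], pmtPid) === 'pat') {
      pmtPid = parsePat(packets[i]);
    } else if (parseType(packets[i], pmtPid) === 'pmt') {
      // parsePmt returns undefined for "forward" PMTs, so keep looking
      table = parsePmt(packets[i]);
      if (table) {
        return table;
      }
    }
  }
  return null;
};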
14035
14036var parsePesType = function(packet, programMapTable) {
14037 var pid = parsePid(packet);
14038 var type = programMapTable[pid];
14039 switch (type) {
14040 case StreamTypes.H264_STREAM_TYPE:
14041 return 'video';
14042 case StreamTypes.ADTS_STREAM_TYPE:
14043 return 'audio';
14044 case StreamTypes.METADATA_STREAM_TYPE:
14045 return 'timed-metadata';
14046 default:
14047 return null;
14048 }
14049};
14050
14051var parsePesTime = function(packet) {
14052 var pusi = parsePayloadUnitStartIndicator(packet);
14053 if (!pusi) {
14054 return null;
14055 }
14056
14057 var offset = 4 + parseAdaptionField(packet);
14058
14059 if (offset >= packet.byteLength) {
14060 // From the H.222.0 MPEG-TS spec
14061 // "For transport stream packets carrying PES packets, stuffing is needed when there
14062 // is insufficient PES packet data to completely fill the transport stream packet
14063 // payload bytes. Stuffing is accomplished by defining an adaptation field longer than
14064 // the sum of the lengths of the data elements in it, so that the payload bytes
14065 // remaining after the adaptation field exactly accommodates the available PES packet
14066 // data."
14067 //
14068 // If the offset is >= the length of the packet, then the packet contains no data
14069 // and instead is just adaptation field stuffing bytes
14070 return null;
14071 }
14072
14073 var pes = null;
14074 var ptsDtsFlags;
14075
14076 // PES packets may be annotated with a PTS value, or a PTS value
14077 // and a DTS value. Determine what combination of values is
14078 // available to work with.
14079 ptsDtsFlags = packet[offset + 7];
14080
14081 // PTS and DTS are normally stored as a 33-bit number. JavaScript
14082 // performs all bitwise operations on 32-bit integers, but it can
14083 // represent a much greater range of integers (up to 53 bits) using
14084 // standard mathematical operations.
14085 // We construct a 31-bit value using bitwise operators over the 31
14086 // most significant bits and then multiply by 4 (equal to a left-shift
14087 // of 2) before we add the final 2 least significant bits of the
14088 // timestamp (equal to an OR.)
14089 if (ptsDtsFlags & 0xC0) {
14090 pes = {};
14091 // the PTS and DTS are not written out directly. For information
14092 // on how they are encoded, see
14093 // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
14094 pes.pts = (packet[offset + 9] & 0x0E) << 27 |
14095 (packet[offset + 10] & 0xFF) << 20 |
14096 (packet[offset + 11] & 0xFE) << 12 |
14097 (packet[offset + 12] & 0xFF) << 5 |
14098 (packet[offset + 13] & 0xFE) >>> 3;
14099 pes.pts *= 4; // Left shift by 2
14100 pes.pts += (packet[offset + 13] & 0x06) >>> 1; // OR by the two LSBs
14101 pes.dts = pes.pts;
14102 if (ptsDtsFlags & 0x40) {
14103 pes.dts = (packet[offset + 14] & 0x0E) << 27 |
14104 (packet[offset + 15] & 0xFF) << 20 |
14105 (packet[offset + 16] & 0xFE) << 12 |
14106 (packet[offset + 17] & 0xFF) << 5 |
14107 (packet[offset + 18] & 0xFE) >>> 3;
14108 pes.dts *= 4; // Left shift by 2
14109 pes.dts += (packet[offset + 18] & 0x06) >>> 1; // OR by the two LSBs
14110 }
14111 }
14112 return pes;
14113};
14114
14115var parseNalUnitType = function(type) {
14116 switch (type) {
14117 case 0x05:
14118 return 'slice_layer_without_partitioning_rbsp_idr';
14119 case 0x06:
14120 return 'sei_rbsp';
14121 case 0x07:
14122 return 'seq_parameter_set_rbsp';
14123 case 0x08:
14124 return 'pic_parameter_set_rbsp';
14125 case 0x09:
14126 return 'access_unit_delimiter_rbsp';
14127 default:
14128 return null;
14129 }
14130};
14131
14132var videoPacketContainsKeyFrame = function(packet) {
14133 var offset = 4 + parseAdaptionField(packet);
14134 var frameBuffer = packet.subarray(offset);
14135 var frameI = 0;
14136 var frameSyncPoint = 0;
14137 var foundKeyFrame = false;
14138 var nalType;
14139
14140 // advance the sync point to a NAL start, if necessary
14141 for (; frameSyncPoint < frameBuffer.byteLength - 3; frameSyncPoint++) {
14142 if (frameBuffer[frameSyncPoint + 2] === 1) {
14143 // the sync point is properly aligned
14144 frameI = frameSyncPoint + 5;
14145 break;
14146 }
14147 }
14148
14149 while (frameI < frameBuffer.byteLength) {
14150 // look at the current byte to determine if we've hit the end of
14151 // a NAL unit boundary
14152 switch (frameBuffer[frameI]) {
14153 case 0:
14154 // skip past non-sync sequences
14155 if (frameBuffer[frameI - 1] !== 0) {
14156 frameI += 2;
14157 break;
14158 } else if (frameBuffer[frameI - 2] !== 0) {
14159 frameI++;
14160 break;
14161 }
14162
14163 if (frameSyncPoint + 3 !== frameI - 2) {
14164 nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
14165 if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
14166 foundKeyFrame = true;
14167 }
14168 }
14169
14170 // drop trailing zeroes
14171 do {
14172 frameI++;
14173 } while (frameBuffer[frameI] !== 1 && frameI < frameBuffer.length);
14174 frameSyncPoint = frameI - 2;
14175 frameI += 3;
14176 break;
14177 case 1:
14178 // skip past non-sync sequences
14179 if (frameBuffer[frameI - 1] !== 0 ||
14180 frameBuffer[frameI - 2] !== 0) {
14181 frameI += 3;
14182 break;
14183 }
14184
14185 nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
14186 if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
14187 foundKeyFrame = true;
14188 }
14189 frameSyncPoint = frameI - 2;
14190 frameI += 3;
14191 break;
14192 default:
14193 // the current byte isn't a one or zero, so it cannot be part
14194 // of a sync sequence
14195 frameI += 3;
14196 break;
14197 }
14198 }
14199 frameBuffer = frameBuffer.subarray(frameSyncPoint);
14200 frameI -= frameSyncPoint;
14201 frameSyncPoint = 0;
14202 // parse the final nal
14203 if (frameBuffer && frameBuffer.byteLength > 3) {
14204 nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
14205 if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
14206 foundKeyFrame = true;
14207 }
14208 }
14209
14210 return foundKeyFrame;
14211};
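// A small example of the NAL unit check driving the keyframe detection
// above: the low five bits of the byte after a start code identify the
// NAL type, and type 5 is an IDR slice, i.e. a keyframe.
// `exampleNalHeader` is an illustrative byte, not real stream data.
var exampleNalHeader = 0x65; // 0x65 & 0x1f === 0x05
var exampleIsIdr = parseNalUnitType(exampleNalHeader & 0x1f) ===
  'slice_layer_without_partitioning_rbsp_idr'; // true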
14212
14213
14214module.exports = {
14215 parseType: parseType,
14216 parsePat: parsePat,
14217 parsePmt: parsePmt,
14218 parsePayloadUnitStartIndicator: parsePayloadUnitStartIndicator,
14219 parsePesType: parsePesType,
14220 parsePesTime: parsePesTime,
14221 videoPacketContainsKeyFrame: videoPacketContainsKeyFrame
14222};
14223
14224},{"./stream-types.js":53}],53:[function(require,module,exports){
14225'use strict';
14226
14227module.exports = {
14228 H264_STREAM_TYPE: 0x1B,
14229 ADTS_STREAM_TYPE: 0x0F,
14230 METADATA_STREAM_TYPE: 0x15
14231};
14232
14233},{}],54:[function(require,module,exports){
14234/**
14235 * mux.js
14236 *
14237 * Copyright (c) 2016 Brightcove
14238 * All rights reserved.
14239 *
14240 * Accepts program elementary stream (PES) data events and corrects
14241 * decode and presentation time stamps to account for a rollover
14242 * of the 33 bit value.
14243 */
14244
14245'use strict';
14246
14247var Stream = require('../utils/stream');
14248
14249var MAX_TS = 8589934592;
14250
14251var RO_THRESH = 4294967296;
14252
14253var handleRollover = function(value, reference) {
14254 var direction = 1;
14255
14256 if (value > reference) {
14257 // If the current timestamp value is greater than our reference timestamp and we detect a
14258 // timestamp rollover, this means the roll over is happening in the opposite direction.
14259 // Example scenario: Enter a long stream/video just after a rollover occurred. The reference
14260 // point will be set to a small number, e.g. 1. The user then seeks backwards over the
14261 // rollover point. In loading this segment, the timestamp values will be very large,
14262 // e.g. 2^33 - 1. Since this comes before the data we loaded previously, we want to adjust
14263 // the time stamp to be `value - 2^33`.
14264 direction = -1;
14265 }
14266
14267 // Note: A seek forwards or back that is greater than the RO_THRESH (2^32, ~13 hours) will
14268 // cause an incorrect adjustment.
14269 while (Math.abs(reference - value) > RO_THRESH) {
14270 value += (direction * MAX_TS);
14271 }
14272
14273 return value;
14274};
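// Worked examples of the correction above, using hypothetical
// timestamps. A reference just past a rollover pulls a large
// pre-rollover value below zero; a reference near the maximum pushes a
// small post-rollover value beyond 2^33.
var exampleBackward = handleRollover(MAX_TS - 1, 1); // -1
var exampleForward = handleRollover(1, MAX_TS - 1); // MAX_TS + 1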
14275
14276var TimestampRolloverStream = function(type) {
14277 var lastDTS, referenceDTS;
14278
14279 TimestampRolloverStream.prototype.init.call(this);
14280
14281 this.type_ = type;
14282
14283 this.push = function(data) {
14284 if (data.type !== this.type_) {
14285 return;
14286 }
14287
14288 if (referenceDTS === undefined) {
14289 referenceDTS = data.dts;
14290 }
14291
14292 data.dts = handleRollover(data.dts, referenceDTS);
14293 data.pts = handleRollover(data.pts, referenceDTS);
14294
14295 lastDTS = data.dts;
14296
14297 this.trigger('data', data);
14298 };
14299
14300 this.flush = function() {
14301 referenceDTS = lastDTS;
14302 this.trigger('done');
14303 };
14304
14305 this.discontinuity = function() {
14306 referenceDTS = void 0;
14307 lastDTS = void 0;
14308 };
14309
14310};
14311
14312TimestampRolloverStream.prototype = new Stream();
14313
14314module.exports = {
14315 TimestampRolloverStream: TimestampRolloverStream,
14316 handleRollover: handleRollover
14317};
14318
14319},{"../utils/stream":62}],55:[function(require,module,exports){
14320module.exports = {
14321 generator: require('./mp4-generator'),
14322 Transmuxer: require('./transmuxer').Transmuxer,
14323 AudioSegmentStream: require('./transmuxer').AudioSegmentStream,
14324 VideoSegmentStream: require('./transmuxer').VideoSegmentStream
14325};
14326
14327},{"./mp4-generator":56,"./transmuxer":58}],56:[function(require,module,exports){
14328/**
14329 * mux.js
14330 *
14331 * Copyright (c) 2015 Brightcove
14332 * All rights reserved.
14333 *
14334 * Functions that generate fragmented MP4s suitable for use with Media
14335 * Source Extensions.
14336 */
14337'use strict';
14338
14339var UINT32_MAX = Math.pow(2, 32) - 1;
14340
14341var box, dinf, esds, ftyp, mdat, mfhd, minf, moof, moov, mvex, mvhd,
14342 trak, tkhd, mdia, mdhd, hdlr, sdtp, stbl, stsd, traf, trex,
14343 trun, types, MAJOR_BRAND, MINOR_VERSION, AVC1_BRAND, VIDEO_HDLR,
14344 AUDIO_HDLR, HDLR_TYPES, VMHD, SMHD, DREF, STCO, STSC, STSZ, STTS;
14345
14346// pre-calculate constants
14347(function() {
14348 var i;
14349 types = {
14350 avc1: [], // codingname
14351 avcC: [],
14352 btrt: [],
14353 dinf: [],
14354 dref: [],
14355 esds: [],
14356 ftyp: [],
14357 hdlr: [],
14358 mdat: [],
14359 mdhd: [],
14360 mdia: [],
14361 mfhd: [],
14362 minf: [],
14363 moof: [],
14364 moov: [],
14365 mp4a: [], // codingname
14366 mvex: [],
14367 mvhd: [],
14368 sdtp: [],
14369 smhd: [],
14370 stbl: [],
14371 stco: [],
14372 stsc: [],
14373 stsd: [],
14374 stsz: [],
14375 stts: [],
14376 styp: [],
14377 tfdt: [],
14378 tfhd: [],
14379 traf: [],
14380 trak: [],
14381 trun: [],
14382 trex: [],
14383 tkhd: [],
14384 vmhd: []
14385 };
14386
14387 // In environments where Uint8Array is undefined (e.g., IE8), skip setup so that we
14388 // don't throw an error
14389 if (typeof Uint8Array === 'undefined') {
14390 return;
14391 }
14392
14393 for (i in types) {
14394 if (types.hasOwnProperty(i)) {
14395 types[i] = [
14396 i.charCodeAt(0),
14397 i.charCodeAt(1),
14398 i.charCodeAt(2),
14399 i.charCodeAt(3)
14400 ];
14401 }
14402 }
14403
14404 MAJOR_BRAND = new Uint8Array([
14405 'i'.charCodeAt(0),
14406 's'.charCodeAt(0),
14407 'o'.charCodeAt(0),
14408 'm'.charCodeAt(0)
14409 ]);
14410 AVC1_BRAND = new Uint8Array([
14411 'a'.charCodeAt(0),
14412 'v'.charCodeAt(0),
14413 'c'.charCodeAt(0),
14414 '1'.charCodeAt(0)
14415 ]);
14416 MINOR_VERSION = new Uint8Array([0, 0, 0, 1]);
14417 VIDEO_HDLR = new Uint8Array([
14418 0x00, // version 0
14419 0x00, 0x00, 0x00, // flags
14420 0x00, 0x00, 0x00, 0x00, // pre_defined
14421 0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'
14422 0x00, 0x00, 0x00, 0x00, // reserved
14423 0x00, 0x00, 0x00, 0x00, // reserved
14424 0x00, 0x00, 0x00, 0x00, // reserved
14425 0x56, 0x69, 0x64, 0x65,
14426 0x6f, 0x48, 0x61, 0x6e,
14427 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
14428 ]);
14429 AUDIO_HDLR = new Uint8Array([
14430 0x00, // version 0
14431 0x00, 0x00, 0x00, // flags
14432 0x00, 0x00, 0x00, 0x00, // pre_defined
14433 0x73, 0x6f, 0x75, 0x6e, // handler_type: 'soun'
14434 0x00, 0x00, 0x00, 0x00, // reserved
14435 0x00, 0x00, 0x00, 0x00, // reserved
14436 0x00, 0x00, 0x00, 0x00, // reserved
14437 0x53, 0x6f, 0x75, 0x6e,
14438 0x64, 0x48, 0x61, 0x6e,
14439 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler'
14440 ]);
14441 HDLR_TYPES = {
14442 video: VIDEO_HDLR,
14443 audio: AUDIO_HDLR
14444 };
14445 DREF = new Uint8Array([
14446 0x00, // version 0
14447 0x00, 0x00, 0x00, // flags
14448 0x00, 0x00, 0x00, 0x01, // entry_count
14449 0x00, 0x00, 0x00, 0x0c, // entry_size
14450 0x75, 0x72, 0x6c, 0x20, // 'url' type
14451 0x00, // version 0
14452 0x00, 0x00, 0x01 // entry_flags
14453 ]);
14454 SMHD = new Uint8Array([
14455 0x00, // version
14456 0x00, 0x00, 0x00, // flags
14457 0x00, 0x00, // balance, 0 means centered
14458 0x00, 0x00 // reserved
14459 ]);
14460 STCO = new Uint8Array([
14461 0x00, // version
14462 0x00, 0x00, 0x00, // flags
14463 0x00, 0x00, 0x00, 0x00 // entry_count
14464 ]);
14465 STSC = STCO;
14466 STSZ = new Uint8Array([
14467 0x00, // version
14468 0x00, 0x00, 0x00, // flags
14469 0x00, 0x00, 0x00, 0x00, // sample_size
14470 0x00, 0x00, 0x00, 0x00 // sample_count
14471 ]);
14472 STTS = STCO;
14473 VMHD = new Uint8Array([
14474 0x00, // version
14475 0x00, 0x00, 0x01, // flags
14476 0x00, 0x00, // graphicsmode
14477 0x00, 0x00,
14478 0x00, 0x00,
14479 0x00, 0x00 // opcolor
14480 ]);
14481}());
14482
14483box = function(type) {
14484 var
14485 payload = [],
14486 size = 0,
14487 i,
14488 result,
14489 view;
14490
14491 for (i = 1; i < arguments.length; i++) {
14492 payload.push(arguments[i]);
14493 }
14494
14495 i = payload.length;
14496
14497 // calculate the total size we need to allocate
14498 while (i--) {
14499 size += payload[i].byteLength;
14500 }
14501 result = new Uint8Array(size + 8);
14502 view = new DataView(result.buffer, result.byteOffset, result.byteLength);
14503 view.setUint32(0, result.byteLength);
14504 result.set(type, 4);
14505
14506 // copy the payload into the result
14507 for (i = 0, size = 8; i < payload.length; i++) {
14508 result.set(payload[i], size);
14509 size += payload[i].byteLength;
14510 }
14511 return result;
14512};
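// A brief usage sketch for `box`, wrapped in a function so it has no
// side effects at load time: wrapping four arbitrary payload bytes in an
// mdat box yields 4 size bytes + 4 type bytes + the payload, with the
// total size written big-endian up front. Illustrative only.
var exampleBoxRoundTrip = function() {
  var b = box(types.mdat, new Uint8Array([1, 2, 3, 4]));
  var view = new DataView(b.buffer, b.byteOffset, b.byteLength);
  return b.byteLength === 12 && view.getUint32(0) === 12; // true
};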
14513
14514dinf = function() {
14515 return box(types.dinf, box(types.dref, DREF));
14516};
14517
14518esds = function(track) {
14519 return box(types.esds, new Uint8Array([
14520 0x00, // version
14521 0x00, 0x00, 0x00, // flags
14522
14523 // ES_Descriptor
14524 0x03, // tag, ES_DescrTag
14525 0x19, // length
14526 0x00, 0x00, // ES_ID
14527 0x00, // streamDependenceFlag, URL_flag, reserved, streamPriority
14528
14529 // DecoderConfigDescriptor
14530 0x04, // tag, DecoderConfigDescrTag
14531 0x11, // length
14532 0x40, // object type
14533 0x15, // streamType
14534 0x00, 0x06, 0x00, // bufferSizeDB
14535 0x00, 0x00, 0xda, 0xc0, // maxBitrate
14536 0x00, 0x00, 0xda, 0xc0, // avgBitrate
14537
14538 // DecoderSpecificInfo
14539 0x05, // tag, DecoderSpecificInfoTag
14540 0x02, // length
14541 // ISO/IEC 14496-3, AudioSpecificConfig
14542 // for samplingFrequencyIndex see ISO/IEC 13818-7:2006, 8.1.3.2.2, Table 35
14543 (track.audioobjecttype << 3) | (track.samplingfrequencyindex >>> 1),
14544 (track.samplingfrequencyindex << 7) | (track.channelcount << 3),
14545 0x06, 0x01, 0x02 // GASpecificConfig
14546 ]));
14547};
14548
14549ftyp = function() {
14550 return box(types.ftyp, MAJOR_BRAND, MINOR_VERSION, MAJOR_BRAND, AVC1_BRAND);
14551};
14552
14553hdlr = function(type) {
14554 return box(types.hdlr, HDLR_TYPES[type]);
14555};
14556mdat = function(data) {
14557 return box(types.mdat, data);
14558};
14559mdhd = function(track) {
14560 var result = new Uint8Array([
14561 0x00, // version 0
14562 0x00, 0x00, 0x00, // flags
14563 0x00, 0x00, 0x00, 0x02, // creation_time
14564 0x00, 0x00, 0x00, 0x03, // modification_time
14565 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
14566
14567 (track.duration >>> 24) & 0xFF,
14568 (track.duration >>> 16) & 0xFF,
14569 (track.duration >>> 8) & 0xFF,
14570 track.duration & 0xFF, // duration
14571 0x55, 0xc4, // 'und' language (undetermined)
14572 0x00, 0x00
14573 ]);
14574
14575 // Use the sample rate from the track metadata, when it is
14576 // defined. The sample rate can be parsed out of an ADTS header, for
14577 // instance.
14578 if (track.samplerate) {
14579 result[12] = (track.samplerate >>> 24) & 0xFF;
14580 result[13] = (track.samplerate >>> 16) & 0xFF;
14581 result[14] = (track.samplerate >>> 8) & 0xFF;
14582 result[15] = (track.samplerate) & 0xFF;
14583 }
14584
14585 return box(types.mdhd, result);
14586};
14587mdia = function(track) {
14588 return box(types.mdia, mdhd(track), hdlr(track.type), minf(track));
14589};
14590mfhd = function(sequenceNumber) {
14591 return box(types.mfhd, new Uint8Array([
14592 0x00,
14593 0x00, 0x00, 0x00, // flags
14594 (sequenceNumber & 0xFF000000) >> 24,
14595 (sequenceNumber & 0xFF0000) >> 16,
14596 (sequenceNumber & 0xFF00) >> 8,
14597 sequenceNumber & 0xFF // sequence_number
14598 ]));
14599};
14600minf = function(track) {
14601 return box(types.minf,
14602 track.type === 'video' ? box(types.vmhd, VMHD) : box(types.smhd, SMHD),
14603 dinf(),
14604 stbl(track));
14605};
14606moof = function(sequenceNumber, tracks) {
14607 var
14608 trackFragments = [],
14609 i = tracks.length;
14610 // build traf boxes for each track fragment
14611 while (i--) {
14612 trackFragments[i] = traf(tracks[i]);
14613 }
14614 return box.apply(null, [
14615 types.moof,
14616 mfhd(sequenceNumber)
14617 ].concat(trackFragments));
14618};
14619/**
14620 * Returns a movie box.
14621 * @param tracks {array} the tracks associated with this movie
14622 * @see ISO/IEC 14496-12:2012(E), section 8.2.1
14623 */
14624moov = function(tracks) {
14625 var
14626 i = tracks.length,
14627 boxes = [];
14628
14629 while (i--) {
14630 boxes[i] = trak(tracks[i]);
14631 }
14632
14633 return box.apply(null, [types.moov, mvhd(0xffffffff)].concat(boxes).concat(mvex(tracks)));
14634};
14635mvex = function(tracks) {
14636 var
14637 i = tracks.length,
14638 boxes = [];
14639
14640 while (i--) {
14641 boxes[i] = trex(tracks[i]);
14642 }
14643 return box.apply(null, [types.mvex].concat(boxes));
14644};
14645mvhd = function(duration) {
14646 var
14647 bytes = new Uint8Array([
14648 0x00, // version 0
14649 0x00, 0x00, 0x00, // flags
14650 0x00, 0x00, 0x00, 0x01, // creation_time
14651 0x00, 0x00, 0x00, 0x02, // modification_time
14652 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
14653 (duration & 0xFF000000) >> 24,
14654 (duration & 0xFF0000) >> 16,
14655 (duration & 0xFF00) >> 8,
14656 duration & 0xFF, // duration
14657 0x00, 0x01, 0x00, 0x00, // 1.0 rate
14658 0x01, 0x00, // 1.0 volume
14659 0x00, 0x00, // reserved
14660 0x00, 0x00, 0x00, 0x00, // reserved
14661 0x00, 0x00, 0x00, 0x00, // reserved
14662 0x00, 0x01, 0x00, 0x00,
14663 0x00, 0x00, 0x00, 0x00,
14664 0x00, 0x00, 0x00, 0x00,
14665 0x00, 0x00, 0x00, 0x00,
14666 0x00, 0x01, 0x00, 0x00,
14667 0x00, 0x00, 0x00, 0x00,
14668 0x00, 0x00, 0x00, 0x00,
14669 0x00, 0x00, 0x00, 0x00,
14670 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
14671 0x00, 0x00, 0x00, 0x00,
14672 0x00, 0x00, 0x00, 0x00,
14673 0x00, 0x00, 0x00, 0x00,
14674 0x00, 0x00, 0x00, 0x00,
14675 0x00, 0x00, 0x00, 0x00,
14676 0x00, 0x00, 0x00, 0x00, // pre_defined
14677 0xff, 0xff, 0xff, 0xff // next_track_ID
14678 ]);
14679 return box(types.mvhd, bytes);
14680};
14681
14682sdtp = function(track) {
14683 var
14684 samples = track.samples || [],
14685 bytes = new Uint8Array(4 + samples.length),
14686 flags,
14687 i;
14688
14689 // leave the full box header (4 bytes) all zero
14690
14691 // write the sample table
14692 for (i = 0; i < samples.length; i++) {
14693 flags = samples[i].flags;
14694
14695 bytes[i + 4] = (flags.dependsOn << 4) |
14696 (flags.isDependedOn << 2) |
14697 (flags.hasRedundancy);
14698 }
14699
14700 return box(types.sdtp,
14701 bytes);
14702};
14703
14704stbl = function(track) {
14705 return box(types.stbl,
14706 stsd(track),
14707 box(types.stts, STTS),
14708 box(types.stsc, STSC),
14709 box(types.stsz, STSZ),
14710 box(types.stco, STCO));
14711};
14712
14713(function() {
14714 var videoSample, audioSample;
14715
14716 stsd = function(track) {
14717
14718 return box(types.stsd, new Uint8Array([
14719 0x00, // version 0
14720 0x00, 0x00, 0x00, // flags
14721 0x00, 0x00, 0x00, 0x01
14722 ]), track.type === 'video' ? videoSample(track) : audioSample(track));
14723 };
14724
14725 videoSample = function(track) {
14726 var
14727 sps = track.sps || [],
14728 pps = track.pps || [],
14729 sequenceParameterSets = [],
14730 pictureParameterSets = [],
14731 i;
14732
14733 // assemble the SPSs
14734 for (i = 0; i < sps.length; i++) {
14735 sequenceParameterSets.push((sps[i].byteLength & 0xFF00) >>> 8);
14736 sequenceParameterSets.push((sps[i].byteLength & 0xFF)); // sequenceParameterSetLength
14737 sequenceParameterSets = sequenceParameterSets.concat(Array.prototype.slice.call(sps[i])); // SPS
14738 }
14739
14740 // assemble the PPSs
14741 for (i = 0; i < pps.length; i++) {
14742 pictureParameterSets.push((pps[i].byteLength & 0xFF00) >>> 8);
14743 pictureParameterSets.push((pps[i].byteLength & 0xFF));
14744 pictureParameterSets = pictureParameterSets.concat(Array.prototype.slice.call(pps[i]));
14745 }
14746
14747 return box(types.avc1, new Uint8Array([
14748 0x00, 0x00, 0x00,
14749 0x00, 0x00, 0x00, // reserved
14750 0x00, 0x01, // data_reference_index
14751 0x00, 0x00, // pre_defined
14752 0x00, 0x00, // reserved
14753 0x00, 0x00, 0x00, 0x00,
14754 0x00, 0x00, 0x00, 0x00,
14755 0x00, 0x00, 0x00, 0x00, // pre_defined
14756 (track.width & 0xff00) >> 8,
14757 track.width & 0xff, // width
14758 (track.height & 0xff00) >> 8,
14759 track.height & 0xff, // height
14760 0x00, 0x48, 0x00, 0x00, // horizresolution
14761 0x00, 0x48, 0x00, 0x00, // vertresolution
14762 0x00, 0x00, 0x00, 0x00, // reserved
14763 0x00, 0x01, // frame_count
14764 0x13,
14765 0x76, 0x69, 0x64, 0x65,
14766 0x6f, 0x6a, 0x73, 0x2d,
14767 0x63, 0x6f, 0x6e, 0x74,
14768 0x72, 0x69, 0x62, 0x2d,
14769 0x68, 0x6c, 0x73, 0x00,
14770 0x00, 0x00, 0x00, 0x00,
14771 0x00, 0x00, 0x00, 0x00,
14772 0x00, 0x00, 0x00, // compressorname
14773 0x00, 0x18, // depth = 24
14774 0x11, 0x11 // pre_defined = -1
14775 ]), box(types.avcC, new Uint8Array([
14776 0x01, // configurationVersion
14777 track.profileIdc, // AVCProfileIndication
14778 track.profileCompatibility, // profile_compatibility
14779 track.levelIdc, // AVCLevelIndication
14780 0xff // lengthSizeMinusOne, hard-coded to 4 bytes
14781 ].concat([
14782 sps.length // numOfSequenceParameterSets
14783 ]).concat(sequenceParameterSets).concat([
14784 pps.length // numOfPictureParameterSets
14785 ]).concat(pictureParameterSets))), // "PPS"
14786 box(types.btrt, new Uint8Array([
14787 0x00, 0x1c, 0x9c, 0x80, // bufferSizeDB
14788 0x00, 0x2d, 0xc6, 0xc0, // maxBitrate
14789 0x00, 0x2d, 0xc6, 0xc0
14790 ])) // avgBitrate
14791 );
14792 };
14793
14794 audioSample = function(track) {
14795 return box(types.mp4a, new Uint8Array([
14796
14797 // SampleEntry, ISO/IEC 14496-12
14798 0x00, 0x00, 0x00,
14799 0x00, 0x00, 0x00, // reserved
14800 0x00, 0x01, // data_reference_index
14801
14802 // AudioSampleEntry, ISO/IEC 14496-12
14803 0x00, 0x00, 0x00, 0x00, // reserved
14804 0x00, 0x00, 0x00, 0x00, // reserved
14805 (track.channelcount & 0xff00) >> 8,
14806 (track.channelcount & 0xff), // channelcount
14807
14808 (track.samplesize & 0xff00) >> 8,
14809 (track.samplesize & 0xff), // samplesize
14810 0x00, 0x00, // pre_defined
14811 0x00, 0x00, // reserved
14812
14813 (track.samplerate & 0xff00) >> 8,
14814 (track.samplerate & 0xff),
14815 0x00, 0x00 // samplerate, 16.16
14816
14817 // MP4AudioSampleEntry, ISO/IEC 14496-14
14818 ]), esds(track));
14819 };
14820}());
14821
14822tkhd = function(track) {
14823 var result = new Uint8Array([
14824 0x00, // version 0
14825 0x00, 0x00, 0x07, // flags
14826 0x00, 0x00, 0x00, 0x00, // creation_time
14827 0x00, 0x00, 0x00, 0x00, // modification_time
14828 (track.id & 0xFF000000) >> 24,
14829 (track.id & 0xFF0000) >> 16,
14830 (track.id & 0xFF00) >> 8,
14831 track.id & 0xFF, // track_ID
14832 0x00, 0x00, 0x00, 0x00, // reserved
14833 (track.duration & 0xFF000000) >> 24,
14834 (track.duration & 0xFF0000) >> 16,
14835 (track.duration & 0xFF00) >> 8,
14836 track.duration & 0xFF, // duration
14837 0x00, 0x00, 0x00, 0x00,
14838 0x00, 0x00, 0x00, 0x00, // reserved
14839 0x00, 0x00, // layer
14840 0x00, 0x00, // alternate_group
14841 0x01, 0x00, // non-audio track volume
14842 0x00, 0x00, // reserved
14843 0x00, 0x01, 0x00, 0x00,
14844 0x00, 0x00, 0x00, 0x00,
14845 0x00, 0x00, 0x00, 0x00,
14846 0x00, 0x00, 0x00, 0x00,
14847 0x00, 0x01, 0x00, 0x00,
14848 0x00, 0x00, 0x00, 0x00,
14849 0x00, 0x00, 0x00, 0x00,
14850 0x00, 0x00, 0x00, 0x00,
14851 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
14852 (track.width & 0xFF00) >> 8,
14853 track.width & 0xFF,
14854 0x00, 0x00, // width
14855 (track.height & 0xFF00) >> 8,
14856 track.height & 0xFF,
14857 0x00, 0x00 // height
14858 ]);
14859
14860 return box(types.tkhd, result);
14861};
14862
14863/**
14864 * Generate a track fragment (traf) box. A traf box collects metadata
14865 * about tracks in a movie fragment (moof) box.
14866 */
14867traf = function(track) {
14868 var trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun,
14869 sampleDependencyTable, dataOffset,
14870 upperWordBaseMediaDecodeTime, lowerWordBaseMediaDecodeTime;
14871
14872 trackFragmentHeader = box(types.tfhd, new Uint8Array([
14873 0x00, // version 0
14874 0x00, 0x00, 0x3a, // flags
14875 (track.id & 0xFF000000) >> 24,
14876 (track.id & 0xFF0000) >> 16,
14877 (track.id & 0xFF00) >> 8,
14878 (track.id & 0xFF), // track_ID
14879 0x00, 0x00, 0x00, 0x01, // sample_description_index
14880 0x00, 0x00, 0x00, 0x00, // default_sample_duration
14881 0x00, 0x00, 0x00, 0x00, // default_sample_size
14882 0x00, 0x00, 0x00, 0x00 // default_sample_flags
14883 ]));
14884
14885 upperWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime / (UINT32_MAX + 1));
14886 lowerWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime % (UINT32_MAX + 1));
14887
14888 trackFragmentDecodeTime = box(types.tfdt, new Uint8Array([
14889 0x01, // version 1
14890 0x00, 0x00, 0x00, // flags
14891 // baseMediaDecodeTime
14892 (upperWordBaseMediaDecodeTime >>> 24) & 0xFF,
14893 (upperWordBaseMediaDecodeTime >>> 16) & 0xFF,
14894 (upperWordBaseMediaDecodeTime >>> 8) & 0xFF,
14895 upperWordBaseMediaDecodeTime & 0xFF,
14896 (lowerWordBaseMediaDecodeTime >>> 24) & 0xFF,
14897 (lowerWordBaseMediaDecodeTime >>> 16) & 0xFF,
14898 (lowerWordBaseMediaDecodeTime >>> 8) & 0xFF,
14899 lowerWordBaseMediaDecodeTime & 0xFF
14900 ]));
14901
14902 // the data offset specifies the number of bytes from the start of
14903 // the containing moof to the first payload byte of the associated
14904 // mdat
14905 dataOffset = (32 + // tfhd
14906 20 + // tfdt
14907 8 + // traf header
14908 16 + // mfhd
14909 8 + // moof header
14910 8); // mdat header
14911
14912 // audio tracks require less metadata
14913 if (track.type === 'audio') {
14914 trackFragmentRun = trun(track, dataOffset);
14915 return box(types.traf,
14916 trackFragmentHeader,
14917 trackFragmentDecodeTime,
14918 trackFragmentRun);
14919 }
14920
14921 // video tracks should contain an independent and disposable samples
14922 // box (sdtp)
14923 // generate one and adjust offsets to match
14924 sampleDependencyTable = sdtp(track);
14925 trackFragmentRun = trun(track,
14926 sampleDependencyTable.length + dataOffset);
14927 return box(types.traf,
14928 trackFragmentHeader,
14929 trackFragmentDecodeTime,
14930 trackFragmentRun,
14931 sampleDependencyTable);
14932};
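// The fixed byte accounting behind `dataOffset` above, written out: each
// box contributes an 8-byte header plus a fixed-size body, so the first
// mdat payload byte sits 92 bytes past the start of the moof before any
// sdtp and trun bytes are added. `exampleMoofOverhead` is illustrative
// only.
var exampleMoofOverhead =
  32 + // tfhd: 8-byte header + 24-byte body
  20 + // tfdt: 8-byte header + 12-byte body
  8 +  // traf header
  16 + // mfhd: 8-byte header + 8-byte body
  8 +  // moof header
  8;   // mdat header; total 92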
14933
14934/**
14935 * Generate a track box.
14936 * @param track {object} a track definition
14937 * @return {Uint8Array} the track box
14938 */
14939trak = function(track) {
14940 track.duration = track.duration || 0xffffffff;
14941 return box(types.trak,
14942 tkhd(track),
14943 mdia(track));
14944};
14945
14946trex = function(track) {
14947 var result = new Uint8Array([
14948 0x00, // version 0
14949 0x00, 0x00, 0x00, // flags
14950 (track.id & 0xFF000000) >> 24,
14951 (track.id & 0xFF0000) >> 16,
14952 (track.id & 0xFF00) >> 8,
14953 (track.id & 0xFF), // track_ID
14954 0x00, 0x00, 0x00, 0x01, // default_sample_description_index
14955 0x00, 0x00, 0x00, 0x00, // default_sample_duration
14956 0x00, 0x00, 0x00, 0x00, // default_sample_size
14957 0x00, 0x01, 0x00, 0x01 // default_sample_flags
14958 ]);
14959 // the last two bytes of default_sample_flags are the sample
14960 // degradation priority, a hint about the importance of this sample
14961 // relative to others. Lower the degradation priority for all sample
14962 // types other than video.
14963 if (track.type !== 'video') {
14964 result[result.length - 1] = 0x00;
14965 }
14966
14967 return box(types.trex, result);
14968};
14969
14970(function() {
14971 var audioTrun, videoTrun, trunHeader;
14972
14973 // This method assumes all samples are uniform. That is, if a
14974 // duration is present for the first sample, it will be present for
14975 // all subsequent samples.
14976 // see ISO/IEC 14496-12:2012, Section 8.8.8.1
14977 trunHeader = function(samples, offset) {
14978 var durationPresent = 0, sizePresent = 0,
14979 flagsPresent = 0, compositionTimeOffset = 0;
14980
14981 // trun flag constants
14982 if (samples.length) {
14983 if (samples[0].duration !== undefined) {
14984 durationPresent = 0x1;
14985 }
14986 if (samples[0].size !== undefined) {
14987 sizePresent = 0x2;
14988 }
14989 if (samples[0].flags !== undefined) {
14990 flagsPresent = 0x4;
14991 }
14992 if (samples[0].compositionTimeOffset !== undefined) {
14993 compositionTimeOffset = 0x8;
14994 }
14995 }
14996
14997 return [
14998 0x00, // version 0
14999 0x00,
15000 durationPresent | sizePresent | flagsPresent | compositionTimeOffset,
15001 0x01, // flags
15002 (samples.length & 0xFF000000) >>> 24,
15003 (samples.length & 0xFF0000) >>> 16,
15004 (samples.length & 0xFF00) >>> 8,
15005 samples.length & 0xFF, // sample_count
15006 (offset & 0xFF000000) >>> 24,
15007 (offset & 0xFF0000) >>> 16,
15008 (offset & 0xFF00) >>> 8,
15009 offset & 0xFF // data_offset
15010 ];
15011 };
15012
15013 videoTrun = function(track, offset) {
15014 var bytes, samples, sample, i;
15015
15016 samples = track.samples || [];
15017 offset += 8 + 12 + (16 * samples.length);
15018
15019 bytes = trunHeader(samples, offset);
15020
15021 for (i = 0; i < samples.length; i++) {
15022 sample = samples[i];
15023 bytes = bytes.concat([
15024 (sample.duration & 0xFF000000) >>> 24,
15025 (sample.duration & 0xFF0000) >>> 16,
15026 (sample.duration & 0xFF00) >>> 8,
15027 sample.duration & 0xFF, // sample_duration
15028 (sample.size & 0xFF000000) >>> 24,
15029 (sample.size & 0xFF0000) >>> 16,
15030 (sample.size & 0xFF00) >>> 8,
15031 sample.size & 0xFF, // sample_size
15032 (sample.flags.isLeading << 2) | sample.flags.dependsOn,
15033 (sample.flags.isDependedOn << 6) |
15034 (sample.flags.hasRedundancy << 4) |
15035 (sample.flags.paddingValue << 1) |
15036 sample.flags.isNonSyncSample,
15037 (sample.flags.degradationPriority & 0xFF00) >>> 8,
15038 sample.flags.degradationPriority & 0xFF, // sample_flags
15039 (sample.compositionTimeOffset & 0xFF000000) >>> 24,
15040 (sample.compositionTimeOffset & 0xFF0000) >>> 16,
15041 (sample.compositionTimeOffset & 0xFF00) >>> 8,
15042 sample.compositionTimeOffset & 0xFF // sample_composition_time_offset
15043 ]);
15044 }
15045 return box(types.trun, new Uint8Array(bytes));
15046 };
15047
15048 audioTrun = function(track, offset) {
15049 var bytes, samples, sample, i;
15050
15051 samples = track.samples || [];
15052 offset += 8 + 12 + (8 * samples.length);
15053
15054 bytes = trunHeader(samples, offset);
15055
15056 for (i = 0; i < samples.length; i++) {
15057 sample = samples[i];
15058 bytes = bytes.concat([
15059 (sample.duration & 0xFF000000) >>> 24,
15060 (sample.duration & 0xFF0000) >>> 16,
15061 (sample.duration & 0xFF00) >>> 8,
15062 sample.duration & 0xFF, // sample_duration
15063 (sample.size & 0xFF000000) >>> 24,
15064 (sample.size & 0xFF0000) >>> 16,
15065 (sample.size & 0xFF00) >>> 8,
15066 sample.size & 0xFF]); // sample_size
15067 }
15068
15069 return box(types.trun, new Uint8Array(bytes));
15070 };
15071
15072 trun = function(track, offset) {
15073 if (track.type === 'audio') {
15074 return audioTrun(track, offset);
15075 }
15076
15077 return videoTrun(track, offset);
15078 };
15079}());
15080
15081module.exports = {
15082 ftyp: ftyp,
15083 mdat: mdat,
15084 moof: moof,
15085 moov: moov,
15086 initSegment: function(tracks) {
15087 var
15088 fileType = ftyp(),
15089 movie = moov(tracks),
15090 result;
15091
15092 result = new Uint8Array(fileType.byteLength + movie.byteLength);
15093 result.set(fileType);
15094 result.set(movie, fileType.byteLength);
15095 return result;
15096 }
15097};
15098
15099},{}],57:[function(require,module,exports){
15100/**
15101 * mux.js
15102 *
15103 * Copyright (c) 2015 Brightcove
15104 * All rights reserved.
15105 *
15106 * Utilities to detect basic properties and metadata about MP4s.
15107 */
15108'use strict';
15109
15110var findBox, parseType, timescale, startTime;
15111
15112// Find the data for a box specified by its path
15113findBox = function(data, path) {
15114 var results = [],
15115 i, size, type, end, subresults;
15116
15117 if (!path.length) {
15118 // short-circuit the search for empty paths
15119 return null;
15120 }
15121
15122 for (i = 0; i < data.byteLength;) {
15123 size = data[i] << 24;
15124 size |= data[i + 1] << 16;
15125 size |= data[i + 2] << 8;
15126 size |= data[i + 3];
15127
15128 type = parseType(data.subarray(i + 4, i + 8));
15129
15130 end = size > 1 ? i + size : data.byteLength;
15131
15132 if (type === path[0]) {
15133 if (path.length === 1) {
15134 // this is the end of the path and we've found the box we were
15135 // looking for
15136 results.push(data.subarray(i + 8, end));
15137 } else {
15138 // recursively search for the next box along the path
15139 subresults = findBox(data.subarray(i + 8, end), path.slice(1));
15140 if (subresults.length) {
15141 results = results.concat(subresults);
15142 }
15143 }
15144 }
15145 i = end;
15146 }
15147
15148 // we've finished searching all of data
15149 return results;
15150};
15151
15152/**
15153 * Returns the string representation of an ASCII encoded four byte buffer.
15154 * @param buffer {Uint8Array} a four-byte buffer to translate
15155 * @return {string} the corresponding string
15156 */
15157parseType = function(buffer) {
15158 var result = '';
15159 result += String.fromCharCode(buffer[0]);
15160 result += String.fromCharCode(buffer[1]);
15161 result += String.fromCharCode(buffer[2]);
15162 result += String.fromCharCode(buffer[3]);
15163 return result;
15164};
15165
15166/**
15167 * Parses an MP4 initialization segment and extracts the timescale
15168 * values for any declared tracks. Timescale values indicate the
15169 * number of clock ticks per second to assume for time-based values
15170 * elsewhere in the MP4.
15171 *
15172 * To determine the start time of an MP4, you need two pieces of
15173 * information: the timescale unit and the earliest base media decode
15174 * time. Multiple timescales can be specified within an MP4 but the
15175 * base media decode time is always expressed in the timescale from
15176 * the media header box for the track:
15177 * ```
15178 * moov > trak > mdia > mdhd.timescale
15179 * ```
15180 * @param init {Uint8Array} the bytes of the init segment
15181 * @return {object} a hash of track ids to timescale values or null if
15182 * the init segment is malformed.
15183 */
15184timescale = function(init) {
15185 var
15186 result = {},
15187 traks = findBox(init, ['moov', 'trak']);
15188
15189 // mdhd timescale
15190 return traks.reduce(function(result, trak) {
15191 var tkhd, version, index, id, mdhd;
15192
15193 tkhd = findBox(trak, ['tkhd'])[0];
15194 if (!tkhd) {
15195 return null;
15196 }
15197 version = tkhd[0];
15198 index = version === 0 ? 12 : 20;
15199 id = tkhd[index] << 24 |
15200 tkhd[index + 1] << 16 |
15201 tkhd[index + 2] << 8 |
15202 tkhd[index + 3];
15203
15204 mdhd = findBox(trak, ['mdia', 'mdhd'])[0];
15205 if (!mdhd) {
15206 return null;
15207 }
15208 version = mdhd[0];
15209 index = version === 0 ? 12 : 20;
15210 result[id] = mdhd[index] << 24 |
15211 mdhd[index + 1] << 16 |
15212 mdhd[index + 2] << 8 |
15213 mdhd[index + 3];
15214 return result;
15215 }, result);
15216};
15217
15218/**
15219 * Determine the base media decode start time, in seconds, for an MP4
15220 * fragment. If multiple fragments are specified, the earliest time is
15221 * returned.
15222 *
15223 * The base media decode time can be parsed from track fragment
15224 * metadata:
15225 * ```
15226 * moof > traf > tfdt.baseMediaDecodeTime
15227 * ```
15228 * It requires the timescale value from the mdhd to interpret.
15229 *
15230 * @param timescale {object} a hash of track ids to timescale values.
15231 * @return {number} the earliest base media decode start time for the
15232 * fragment, in seconds
15233 */
15234startTime = function(timescale, fragment) {
15235 var trafs, baseTimes, result;
15236
15237 // we need info from two children of each track fragment box
15238 trafs = findBox(fragment, ['moof', 'traf']);
15239
15240 // determine the start times for each track
15241 baseTimes = [].concat.apply([], trafs.map(function(traf) {
15242 return findBox(traf, ['tfhd']).map(function(tfhd) {
15243 var id, scale, baseTime;
15244
15245 // get the track id from the tfhd
15246 id = tfhd[4] << 24 |
15247 tfhd[5] << 16 |
15248 tfhd[6] << 8 |
15249 tfhd[7];
15250 // assume a 90kHz clock if no timescale was specified
15251 scale = timescale[id] || 90e3;
15252
15253 // get the base media decode time from the tfdt
15254 baseTime = findBox(traf, ['tfdt']).map(function(tfdt) {
15255 var version, result;
15256
15257 version = tfdt[0];
15258 result = (tfdt[4] << 24 |
15259 tfdt[5] << 16 |
15260 tfdt[6] << 8 |
15261 tfdt[7]) >>> 0; // >>> 0 keeps the 32-bit value unsigned
15262 if (version === 1) {
15263 result *= Math.pow(2, 32);
15264 result += (tfdt[8] << 24 |
15265 tfdt[9] << 16 |
15266 tfdt[10] << 8 |
15267 tfdt[11]) >>> 0;
15268 }
15269 return result;
15270 })[0];
15271 baseTime = baseTime || Infinity;
15272
15273 // convert base time to seconds
15274 return baseTime / scale;
15275 });
15276 }));
15277
15278 // return the minimum
15279 result = Math.min.apply(null, baseTimes);
15280 return isFinite(result) ? result : 0;
15281};
15282
15283module.exports = {
15284 parseType: parseType,
15285 timescale: timescale,
15286 startTime: startTime
15287};
15288
15289},{}],58:[function(require,module,exports){
15290/**
15291 * mux.js
15292 *
15293 * Copyright (c) 2015 Brightcove
15294 * All rights reserved.
15295 *
15296 * A stream-based mp2t to mp4 converter. This utility can be used to
15297 * deliver mp4s to a SourceBuffer on platforms that support native
15298 * Media Source Extensions.
15299 */
15300'use strict';
15301
15302var Stream = require('../utils/stream.js');
15303var mp4 = require('./mp4-generator.js');
15304var m2ts = require('../m2ts/m2ts.js');
15305var AdtsStream = require('../codecs/adts.js');
15306var H264Stream = require('../codecs/h264').H264Stream;
15307var AacStream = require('../aac');
15308var coneOfSilence = require('../data/silence');
15309var clock = require('../utils/clock');
15310
15311// constants
15312var AUDIO_PROPERTIES = [
15313 'audioobjecttype',
15314 'channelcount',
15315 'samplerate',
15316 'samplingfrequencyindex',
15317 'samplesize'
15318];
15319
15320var VIDEO_PROPERTIES = [
15321 'width',
15322 'height',
15323 'profileIdc',
15324 'levelIdc',
15325 'profileCompatibility'
15326];
15327
15328var ONE_SECOND_IN_TS = 90000; // 90kHz clock
15329
15330// object types
15331var VideoSegmentStream, AudioSegmentStream, Transmuxer, CoalesceStream;
15332
15333// Helper functions
15334var
15335 createDefaultSample,
15336 isLikelyAacData,
15337 collectDtsInfo,
15338 clearDtsInfo,
15339 calculateTrackBaseMediaDecodeTime,
15340 arrayEquals,
15341 sumFrameByteLengths;
15342
15343/**
15344 * Default sample object
15345 * see ISO/IEC 14496-12:2012, section 8.6.4.3
15346 */
15347createDefaultSample = function() {
15348 return {
15349 size: 0,
15350 flags: {
15351 isLeading: 0,
15352 dependsOn: 1,
15353 isDependedOn: 0,
15354 hasRedundancy: 0,
15355 degradationPriority: 0
15356 }
15357 };
15358};
15359
15360isLikelyAacData = function(data) {
15361 if ((data[0] === 'I'.charCodeAt(0)) &&
15362 (data[1] === 'D'.charCodeAt(0)) &&
15363 (data[2] === '3'.charCodeAt(0))) {
15364 return true;
15365 }
15366 return false;
15367};
15368
15369/**
15370 * Compare two arrays (even typed) for same-ness
15371 */
15372arrayEquals = function(a, b) {
15373 var
15374 i;
15375
15376 if (a.length !== b.length) {
15377 return false;
15378 }
15379
15380 // compare the value of each element in the array
15381 for (i = 0; i < a.length; i++) {
15382 if (a[i] !== b[i]) {
15383 return false;
15384 }
15385 }
15386
15387 return true;
15388};
15389
15390/**
15391 * Sum the `byteLength` properties of the data in each AAC frame
15392 */
15393sumFrameByteLengths = function(array) {
15394 var
15395 i,
15396 currentObj,
15397 sum = 0;
15398
15399 // sum the byteLength of the data in each frame
15400 for (i = 0; i < array.length; i++) {
15401 currentObj = array[i];
15402 sum += currentObj.data.byteLength;
15403 }
15404
15405 return sum;
15406};
15407
15408/**
15409 * Constructs a single-track, ISO BMFF media segment from AAC data
15410 * events. The output of this stream can be fed to a SourceBuffer
15411 * configured with a suitable initialization segment.
15412 */
15413AudioSegmentStream = function(track) {
15414 var
15415 adtsFrames = [],
15416 sequenceNumber = 0,
15417 earliestAllowedDts = 0,
15418 audioAppendStartTs = 0,
15419 videoBaseMediaDecodeTime = Infinity;
15420
15421 AudioSegmentStream.prototype.init.call(this);
15422
15423 this.push = function(data) {
15424 collectDtsInfo(track, data);
15425
15426 if (track) {
15427 AUDIO_PROPERTIES.forEach(function(prop) {
15428 track[prop] = data[prop];
15429 });
15430 }
15431
15432 // buffer audio data until end() is called
15433 adtsFrames.push(data);
15434 };
15435
15436 this.setEarliestDts = function(earliestDts) {
15437 earliestAllowedDts = earliestDts - track.timelineStartInfo.baseMediaDecodeTime;
15438 };
15439
15440 this.setVideoBaseMediaDecodeTime = function(baseMediaDecodeTime) {
15441 videoBaseMediaDecodeTime = baseMediaDecodeTime;
15442 };
15443
15444 this.setAudioAppendStart = function(timestamp) {
15445 audioAppendStartTs = timestamp;
15446 };
15447
15448 this.flush = function() {
15449 var
15450 frames,
15451 moof,
15452 mdat,
15453 boxes;
15454
15455 // return early if no audio data has been observed
15456 if (adtsFrames.length === 0) {
15457 this.trigger('done', 'AudioSegmentStream');
15458 return;
15459 }
15460
15461 frames = this.trimAdtsFramesByEarliestDts_(adtsFrames);
15462 track.baseMediaDecodeTime = calculateTrackBaseMediaDecodeTime(track);
15463
15464 this.prefixWithSilence_(track, frames);
15465
15466 // we have to build the index from byte locations to
15467 // samples (that is, adts frames) in the audio data
15468 track.samples = this.generateSampleTable_(frames);
15469
15470 // concatenate the audio data to construct the mdat
15471 mdat = mp4.mdat(this.concatenateFrameData_(frames));
15472
15473 adtsFrames = [];
15474
15475 moof = mp4.moof(sequenceNumber, [track]);
15476 boxes = new Uint8Array(moof.byteLength + mdat.byteLength);
15477
15478 // bump the sequence number for next time
15479 sequenceNumber++;
15480
15481 boxes.set(moof);
15482 boxes.set(mdat, moof.byteLength);
15483
15484 clearDtsInfo(track);
15485
15486 this.trigger('data', {track: track, boxes: boxes});
15487 this.trigger('done', 'AudioSegmentStream');
15488 };
15489
15490 // Possibly pad (prefix) the audio track with silence if appending this track
15491 // would lead to the introduction of a gap in the audio buffer
15492 this.prefixWithSilence_ = function(track, frames) {
15493 var
15494 baseMediaDecodeTimeTs,
15495 frameDuration = 0,
15496 audioGapDuration = 0,
15497 audioFillFrameCount = 0,
15498 audioFillDuration = 0,
15499 silentFrame,
15500 i;
15501
15502 if (!frames.length) {
15503 return;
15504 }
15505
15506 baseMediaDecodeTimeTs = clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate);
15507 // determine frame clock duration based on sample rate, round up to avoid overfills
15508 frameDuration = Math.ceil(ONE_SECOND_IN_TS / (track.samplerate / 1024));
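// worked example (illustrative): at a 44.1kHz sample rate each AAC frame
// covers 1024 samples, so frameDuration is
// ceil(90000 / (44100 / 1024)) = ceil(2089.8) = 2090 ticks, about 23.2ms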
15509
15510 if (audioAppendStartTs && videoBaseMediaDecodeTime) {
15511 // insert the shortest possible amount (audio gap or audio to video gap)
15512 audioGapDuration =
15513 baseMediaDecodeTimeTs - Math.max(audioAppendStartTs, videoBaseMediaDecodeTime);
15514 // number of full frames in the audio gap
15515 audioFillFrameCount = Math.floor(audioGapDuration / frameDuration);
15516 audioFillDuration = audioFillFrameCount * frameDuration;
15517 }
15518
15519 // don't attempt to fill gaps smaller than a single frame or larger
15520 // than a half second
15521 if (audioFillFrameCount < 1 || audioFillDuration > ONE_SECOND_IN_TS / 2) {
15522 return;
15523 }
15524
15525 silentFrame = coneOfSilence[track.samplerate];
15526
15527 if (!silentFrame) {
15528 // we don't have a silent frame pregenerated for the sample rate, so use a frame
15529 // from the content instead
15530 silentFrame = frames[0].data;
15531 }
15532
15533 for (i = 0; i < audioFillFrameCount; i++) {
15534 frames.splice(i, 0, {
15535 data: silentFrame
15536 });
15537 }
15538
15539 track.baseMediaDecodeTime -=
15540 Math.floor(clock.videoTsToAudioTs(audioFillDuration, track.samplerate));
15541 };
15542
15543 // If the audio segment extends before the earliest allowed dts
15544 // value, remove AAC frames until it starts at or after the earliest
15545 // allowed DTS so that we don't end up with a negative
15546 // baseMediaDecodeTime for the audio track
15547 this.trimAdtsFramesByEarliestDts_ = function(adtsFrames) {
15548 if (track.minSegmentDts >= earliestAllowedDts) {
15549 return adtsFrames;
15550 }
15551
15552 // We will need to recalculate the earliest segment Dts
15553 track.minSegmentDts = Infinity;
15554
15555 return adtsFrames.filter(function(currentFrame) {
15556 // If this is an allowed frame, keep it and record its DTS
15557 if (currentFrame.dts >= earliestAllowedDts) {
15558 track.minSegmentDts = Math.min(track.minSegmentDts, currentFrame.dts);
15559 track.minSegmentPts = track.minSegmentDts;
15560 return true;
15561 }
15562 // Otherwise, discard it
15563 return false;
15564 });
15565 };
15566
15567 // generate the track's sample table from an array of frames
15568 this.generateSampleTable_ = function(frames) {
15569 var
15570 i,
15571 currentFrame,
15572 samples = [];
15573
15574 for (i = 0; i < frames.length; i++) {
15575 currentFrame = frames[i];
15576 samples.push({
15577 size: currentFrame.data.byteLength,
15578 duration: 1024 // For AAC audio, every frame contains 1024 audio samples
15579 });
15580 }
15581 return samples;
15582 };
15583
15584 // generate the track's raw mdat data from an array of frames
15585 this.concatenateFrameData_ = function(frames) {
15586 var
15587 i,
15588 currentFrame,
15589 dataOffset = 0,
15590 data = new Uint8Array(sumFrameByteLengths(frames));
15591
15592 for (i = 0; i < frames.length; i++) {
15593 currentFrame = frames[i];
15594
15595 data.set(currentFrame.data, dataOffset);
15596 dataOffset += currentFrame.data.byteLength;
15597 }
15598 return data;
15599 };
15600};
15601
15602AudioSegmentStream.prototype = new Stream();
15603
15604/**
15605 * Constructs a single-track, ISO BMFF media segment from H264 data
15606 * events. The output of this stream can be fed to a SourceBuffer
15607 * configured with a suitable initialization segment.
15608 * @param track {object} track metadata configuration
15609 * @param options {object} transmuxer options object
15610 * @param options.alignGopsAtEnd {boolean} If true, start from the end of the
15611 * gopsToAlignWith list when attempting to align gop pts
15612 */
15613VideoSegmentStream = function(track, options) {
15614 var
15615 sequenceNumber = 0,
15616 nalUnits = [],
15617 gopsToAlignWith = [],
15618 config,
15619 pps;
15620
15621 options = options || {};
15622
15623 VideoSegmentStream.prototype.init.call(this);
15624
15625 delete track.minPTS;
15626
15627 this.gopCache_ = [];
15628
15629 this.push = function(nalUnit) {
15630 collectDtsInfo(track, nalUnit);
15631
15632 // record the track config
15633 if (nalUnit.nalUnitType === 'seq_parameter_set_rbsp' && !config) {
15634 config = nalUnit.config;
15635 track.sps = [nalUnit.data];
15636
15637 VIDEO_PROPERTIES.forEach(function(prop) {
15638 track[prop] = config[prop];
15639 }, this);
15640 }
15641
15642 if (nalUnit.nalUnitType === 'pic_parameter_set_rbsp' &&
15643 !pps) {
15644 pps = nalUnit.data;
15645 track.pps = [nalUnit.data];
15646 }
15647
15648 // buffer video until flush() is called
15649 nalUnits.push(nalUnit);
15650 };
15651
15652 this.flush = function() {
15653 var
15654 frames,
15655 gopForFusion,
15656 gops,
15657 moof,
15658 mdat,
15659 boxes;
15660
15661 // Throw away nalUnits at the start of the byte stream until
15662 // we find the first AUD
15663 while (nalUnits.length) {
15664 if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
15665 break;
15666 }
15667 nalUnits.shift();
15668 }
15669
15670 // Return early if no video data has been observed
15671 if (nalUnits.length === 0) {
15672 this.resetStream_();
15673 this.trigger('done', 'VideoSegmentStream');
15674 return;
15675 }
15676
15677 // Organize the raw nal-units into arrays that represent
15678 // higher-level constructs such as frames and gops
15679 // (group-of-pictures)
15680 frames = this.groupNalsIntoFrames_(nalUnits);
15681 gops = this.groupFramesIntoGops_(frames);
15682
15683 // If the first frame of this fragment is not a keyframe we have
15684 // a problem since MSE (on Chrome) requires a leading keyframe.
15685 //
15686 // We have two approaches to repairing this situation:
15687 // 1) GOP-FUSION:
15688 // This is where we keep track of the GOPS (group-of-pictures)
15689 // from previous fragments and attempt to find one that we can
15690 // prepend to the current fragment in order to create a valid
15691 // fragment.
15692 // 2) KEYFRAME-PULLING:
15693 // Here we search for the first keyframe in the fragment and
15694 // throw away all the frames between the start of the fragment
15695 // and that keyframe. We then extend the duration and pull the
15696 // PTS of the keyframe forward so that it covers the time range
15697 // of the frames that were disposed of.
15698 //
15699 // #1 is far preferable to #2 (which can cause "stuttering") but
15700 // requires more things to be just right.
15701 if (!gops[0][0].keyFrame) {
15702 // Search for a gop for fusion from our gopCache
15703 gopForFusion = this.getGopForFusion_(nalUnits[0], track);
15704
15705 if (gopForFusion) {
15706 gops.unshift(gopForFusion);
15707 // Adjust Gops' metadata to account for the inclusion of the
15708 // new gop at the beginning
15709 gops.byteLength += gopForFusion.byteLength;
15710 gops.nalCount += gopForFusion.nalCount;
15711 gops.pts = gopForFusion.pts;
15712 gops.dts = gopForFusion.dts;
15713 gops.duration += gopForFusion.duration;
15714 } else {
15715 // If we didn't find a candidate gop, fall back to keyframe-pulling
15716 gops = this.extendFirstKeyFrame_(gops);
15717 }
15718 }
15719
15720 // Trim gops to align with gopsToAlignWith
15721 if (gopsToAlignWith.length) {
15722 var alignedGops;
15723
15724 if (options.alignGopsAtEnd) {
15725 alignedGops = this.alignGopsAtEnd_(gops);
15726 } else {
15727 alignedGops = this.alignGopsAtStart_(gops);
15728 }
15729
15730 if (!alignedGops) {
15731 // save all the nals in the last GOP into the gop cache
15732 this.gopCache_.unshift({
15733 gop: gops.pop(),
15734 pps: track.pps,
15735 sps: track.sps
15736 });
15737
15738 // Keep a maximum of 6 GOPs in the cache
15739 this.gopCache_.length = Math.min(6, this.gopCache_.length);
15740
15741 // Clear nalUnits
15742 nalUnits = [];
15743
15744 // return early since no gops can be aligned with the desired gopsToAlignWith
15745 this.resetStream_();
15746 this.trigger('done', 'VideoSegmentStream');
15747 return;
15748 }
15749
15750 // Some gops were trimmed. clear dts info so minSegmentDts and pts are correct
15751 // when recalculated before sending off to CoalesceStream
15752 clearDtsInfo(track);
15753
15754 gops = alignedGops;
15755 }
15756
15757 collectDtsInfo(track, gops);
15758
15759 // First, we have to build the index from byte locations to
15760 // samples (that is, frames) in the video data
15761 track.samples = this.generateSampleTable_(gops);
15762
15763 // Concatenate the video data and construct the mdat
15764 mdat = mp4.mdat(this.concatenateNalData_(gops));
15765
15766 track.baseMediaDecodeTime = calculateTrackBaseMediaDecodeTime(track);
15767
15768 this.trigger('processedGopsInfo', gops.map(function(gop) {
15769 return {
15770 pts: gop.pts,
15771 dts: gop.dts,
15772 byteLength: gop.byteLength
15773 };
15774 }));
15775
15776 // save all the nals in the last GOP into the gop cache
15777 this.gopCache_.unshift({
15778 gop: gops.pop(),
15779 pps: track.pps,
15780 sps: track.sps
15781 });
15782
15783 // Keep a maximum of 6 GOPs in the cache
15784 this.gopCache_.length = Math.min(6, this.gopCache_.length);
15785
15786 // Clear nalUnits
15787 nalUnits = [];
15788
15789 this.trigger('baseMediaDecodeTime', track.baseMediaDecodeTime);
15790 this.trigger('timelineStartInfo', track.timelineStartInfo);
15791
15792 moof = mp4.moof(sequenceNumber, [track]);
15793
15794 // it would be great to allocate this array up front instead of
15795 // throwing away hundreds of media segment fragments
15796 boxes = new Uint8Array(moof.byteLength + mdat.byteLength);
15797
15798 // Bump the sequence number for next time
15799 sequenceNumber++;
15800
15801 boxes.set(moof);
15802 boxes.set(mdat, moof.byteLength);
15803
15804 this.trigger('data', {track: track, boxes: boxes});
15805
15806 this.resetStream_();
15807
15808 // Continue with the flush process now
15809 this.trigger('done', 'VideoSegmentStream');
15810 };
15811
15812 this.resetStream_ = function() {
15813 clearDtsInfo(track);
15814
15815 // reset config and pps because they may differ across segments
15816 // for instance, when we are rendition switching
15817 config = undefined;
15818 pps = undefined;
15819 };
15820
15821 // Search for a candidate Gop for gop-fusion from the gop cache and
15822 // return it or return null if no good candidate was found
15823 this.getGopForFusion_ = function(nalUnit) {
15824 var
15825 halfSecond = 45000, // Half a second in a 90kHz clock
15826 allowableOverlap = 10000, // About 3 frames @ 30fps
15827 nearestDistance = Infinity,
15828 dtsDistance,
15829 nearestGopObj,
15830 currentGop,
15831 currentGopObj,
15832 i;
15833
15834 // Search for the GOP nearest to the beginning of this nal unit
15835 for (i = 0; i < this.gopCache_.length; i++) {
15836 currentGopObj = this.gopCache_[i];
15837 currentGop = currentGopObj.gop;
15838
15839 // Reject Gops with different SPS or PPS
15840 if (!(track.pps && arrayEquals(track.pps[0], currentGopObj.pps[0])) ||
15841 !(track.sps && arrayEquals(track.sps[0], currentGopObj.sps[0]))) {
15842 continue;
15843 }
15844
15845 // Reject Gops that would require a negative baseMediaDecodeTime
15846 if (currentGop.dts < track.timelineStartInfo.dts) {
15847 continue;
15848 }
15849
15850 // The distance between the end of the gop and the start of the nalUnit
15851 dtsDistance = (nalUnit.dts - currentGop.dts) - currentGop.duration;
15852
15853 // Only consider GOPS that start before the nal unit and end within
15854 // a half-second of the nal unit
15855 if (dtsDistance >= -allowableOverlap &&
15856 dtsDistance <= halfSecond) {
15857
15858 // Always use the closest GOP we found if there is more than
15859 // one candidate
15860 if (!nearestGopObj ||
15861 nearestDistance > dtsDistance) {
15862 nearestGopObj = currentGopObj;
15863 nearestDistance = dtsDistance;
15864 }
15865 }
15866 }
15867
15868 if (nearestGopObj) {
15869 return nearestGopObj.gop;
15870 }
15871 return null;
15872 };
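// Worked example (illustrative numbers): for nalUnit.dts = 900000 and a
// cached gop with dts = 855000 and duration = 36000,
//   dtsDistance = (900000 - 855000) - 36000 = 9000
// which lies within [-allowableOverlap, halfSecond] = [-10000, 45000],
// so that gop would be considered a fusion candidate.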
15873
15874 this.extendFirstKeyFrame_ = function(gops) {
15875 var currentGop;
15876
15877 if (!gops[0][0].keyFrame && gops.length > 1) {
15878 // Remove the first GOP
15879 currentGop = gops.shift();
15880
15881 gops.byteLength -= currentGop.byteLength;
15882 gops.nalCount -= currentGop.nalCount;
15883
15884 // Extend the first frame of what is now the
15885 // first gop to cover the time period of the
15886 // frames we just removed
15887 gops[0][0].dts = currentGop.dts;
15888 gops[0][0].pts = currentGop.pts;
15889 gops[0][0].duration += currentGop.duration;
15890 }
15891
15892 return gops;
15893 };
15894
15895 // Convert an array of nal units into an array of frames with each frame being
15896 // composed of the nal units that make up that frame
15897 // Also keep track of cumulative data about the frame from the nal units such
15898 // as the frame duration, starting pts, etc.
15899 this.groupNalsIntoFrames_ = function(nalUnits) {
15900 var
15901 i,
15902 currentNal,
15903 currentFrame = [],
15904 frames = [];
15905
15906 currentFrame.byteLength = 0;
15907
15908 for (i = 0; i < nalUnits.length; i++) {
15909 currentNal = nalUnits[i];
15910
15911 // Split on 'aud'-type nal units
15912 if (currentNal.nalUnitType === 'access_unit_delimiter_rbsp') {
15913 // Since the very first nal unit is expected to be an AUD,
15914 // only push to the frames array when currentFrame is not empty
15915 if (currentFrame.length) {
15916 currentFrame.duration = currentNal.dts - currentFrame.dts;
15917 frames.push(currentFrame);
15918 }
15919 currentFrame = [currentNal];
15920 currentFrame.byteLength = currentNal.data.byteLength;
15921 currentFrame.pts = currentNal.pts;
15922 currentFrame.dts = currentNal.dts;
15923 } else {
15924 // Specifically flag key frames for ease of use later
15925 if (currentNal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
15926 currentFrame.keyFrame = true;
15927 }
15928 currentFrame.duration = currentNal.dts - currentFrame.dts;
15929 currentFrame.byteLength += currentNal.data.byteLength;
15930 currentFrame.push(currentNal);
15931 }
15932 }
15933
15934 // For the last frame, use the duration of the previous frame if we
15935 // have nothing better to go on
15936 if (frames.length &&
15937 (!currentFrame.duration ||
15938 currentFrame.duration <= 0)) {
15939 currentFrame.duration = frames[frames.length - 1].duration;
15940 }
15941
15942 // Push the final frame
15943 frames.push(currentFrame);
15944 return frames;
15945 };
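// Resulting shape (descriptive sketch): `frames` is an array of frame
// arrays, each holding the nal units between two AUDs plus bookkeeping
// properties, e.g.
//   frames[0]            // [audNal, spsNal, ppsNal, idrNal, ...]
//   frames[0].pts        // pts of the frame's AUD
//   frames[0].duration   // dts delta to the start of the next frame
//   frames[0].byteLength // total byteLength of the nal unit data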
15946
15947 // Convert an array of frames into an array of Gop with each Gop being composed
15948 // of the frames that make up that Gop
15949 // Also keep track of cumulative data about the Gop from the frames such as the
15950 // Gop duration, starting pts, etc.
15951 this.groupFramesIntoGops_ = function(frames) {
15952 var
15953 i,
15954 currentFrame,
15955 currentGop = [],
15956 gops = [];
15957
15958 // We must pre-set some of the values on the Gop since we
15959 // keep running totals of these values
15960 currentGop.byteLength = 0;
15961 currentGop.nalCount = 0;
15962 currentGop.duration = 0;
15963 currentGop.pts = frames[0].pts;
15964 currentGop.dts = frames[0].dts;
15965
15966 // store some metadata about all the Gops
15967 gops.byteLength = 0;
15968 gops.nalCount = 0;
15969 gops.duration = 0;
15970 gops.pts = frames[0].pts;
15971 gops.dts = frames[0].dts;
15972
15973 for (i = 0; i < frames.length; i++) {
15974 currentFrame = frames[i];
15975
15976 if (currentFrame.keyFrame) {
15977 // Since the very first frame is expected to be a keyframe,
15978 // only push to the gops array when currentGop is not empty
15979 if (currentGop.length) {
15980 gops.push(currentGop);
15981 gops.byteLength += currentGop.byteLength;
15982 gops.nalCount += currentGop.nalCount;
15983 gops.duration += currentGop.duration;
15984 }
15985
15986 currentGop = [currentFrame];
15987 currentGop.nalCount = currentFrame.length;
15988 currentGop.byteLength = currentFrame.byteLength;
15989 currentGop.pts = currentFrame.pts;
15990 currentGop.dts = currentFrame.dts;
15991 currentGop.duration = currentFrame.duration;
15992 } else {
15993 currentGop.duration += currentFrame.duration;
15994 currentGop.nalCount += currentFrame.length;
15995 currentGop.byteLength += currentFrame.byteLength;
15996 currentGop.push(currentFrame);
15997 }
15998 }
15999
16000 if (gops.length && currentGop.duration <= 0) {
16001 currentGop.duration = gops[gops.length - 1].duration;
16002 }
16003 gops.byteLength += currentGop.byteLength;
16004 gops.nalCount += currentGop.nalCount;
16005 gops.duration += currentGop.duration;
16006
16007 // push the final Gop
16008 gops.push(currentGop);
16009 return gops;
16010 };
16011
16012 // generate the track's sample table from an array of gops
16013 this.generateSampleTable_ = function(gops, baseDataOffset) {
16014 var
16015 h, i,
16016 sample,
16017 currentGop,
16018 currentFrame,
16019 dataOffset = baseDataOffset || 0,
16020 samples = [];
16021
16022 for (h = 0; h < gops.length; h++) {
16023 currentGop = gops[h];
16024
16025 for (i = 0; i < currentGop.length; i++) {
16026 currentFrame = currentGop[i];
16027
16028 sample = createDefaultSample();
16029
16030 sample.dataOffset = dataOffset;
16031 sample.compositionTimeOffset = currentFrame.pts - currentFrame.dts;
16032 sample.duration = currentFrame.duration;
16033 sample.size = 4 * currentFrame.length; // Space for nal unit size
16034 sample.size += currentFrame.byteLength;
16035
16036 if (currentFrame.keyFrame) {
16037 sample.flags.dependsOn = 2;
16038 }
16039
16040 dataOffset += sample.size;
16041
16042 samples.push(sample);
16043 }
16044 }
16045 return samples;
16046 };
16047
16048 // generate the track's raw mdat data from an array of gops
16049 this.concatenateNalData_ = function(gops) {
16050 var
16051 h, i, j,
16052 currentGop,
16053 currentFrame,
16054 currentNal,
16055 dataOffset = 0,
16056 nalsByteLength = gops.byteLength,
16057 numberOfNals = gops.nalCount,
16058 totalByteLength = nalsByteLength + 4 * numberOfNals,
16059 data = new Uint8Array(totalByteLength),
16060 view = new DataView(data.buffer);
16061
16062 // For each Gop..
16063 for (h = 0; h < gops.length; h++) {
16064 currentGop = gops[h];
16065
16066 // For each Frame..
16067 for (i = 0; i < currentGop.length; i++) {
16068 currentFrame = currentGop[i];
16069
16070 // For each NAL..
16071 for (j = 0; j < currentFrame.length; j++) {
16072 currentNal = currentFrame[j];
16073
16074 view.setUint32(dataOffset, currentNal.data.byteLength);
16075 dataOffset += 4;
16076 data.set(currentNal.data, dataOffset);
16077 dataOffset += currentNal.data.byteLength;
16078 }
16079 }
16080 }
16081 return data;
16082 };
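// Explanatory note: the loop above converts the Annex B style elementary
// stream into the length-prefixed layout fragmented MP4 expects -- each
// nal unit is preceded by a 4-byte big-endian byte count instead of a
// start code. A 10-byte nal unit, for example, is written as
// 00 00 00 0A followed by its payload, which is why generateSampleTable_
// adds 4 bytes per nal unit to every sample size.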
16083
16084 // trim gop list to the first gop found that has a matching pts with a gop in the list
16085 // of gopsToAlignWith starting from the START of the list
16086 this.alignGopsAtStart_ = function(gops) {
16087 var alignIndex, gopIndex, align, gop, byteLength, nalCount, duration, alignedGops;
16088
16089 byteLength = gops.byteLength;
16090 nalCount = gops.nalCount;
16091 duration = gops.duration;
16092 alignIndex = gopIndex = 0;
16093
16094 while (alignIndex < gopsToAlignWith.length && gopIndex < gops.length) {
16095 align = gopsToAlignWith[alignIndex];
16096 gop = gops[gopIndex];
16097
16098 if (align.pts === gop.pts) {
16099 break;
16100 }
16101
16102 if (gop.pts > align.pts) {
16103 // this current gop starts after the current gop we want to align on, so increment
16104 // align index
16105 alignIndex++;
16106 continue;
16107 }
16108
16109 // current gop starts before the current gop we want to align on, so
16110 // increment gop index
16111 gopIndex++;
16112 byteLength -= gop.byteLength;
16113 nalCount -= gop.nalCount;
16114 duration -= gop.duration;
16115 }
16116
16117 if (gopIndex === 0) {
16118 // no gops to trim
16119 return gops;
16120 }
16121
16122 if (gopIndex === gops.length) {
16123 // all gops trimmed, skip appending all gops
16124 return null;
16125 }
16126
16127 alignedGops = gops.slice(gopIndex);
16128 alignedGops.byteLength = byteLength;
16129 alignedGops.duration = duration;
16130 alignedGops.nalCount = nalCount;
16131 alignedGops.pts = alignedGops[0].pts;
16132 alignedGops.dts = alignedGops[0].dts;
16133
16134 return alignedGops;
16135 };
16136
16137 // trim gop list to the first gop found that has a matching pts with a gop in the list
16138 // of gopsToAlignWith starting from the END of the list
16139 this.alignGopsAtEnd_ = function(gops) {
16140 var alignIndex, gopIndex, align, gop, alignEndIndex, matchFound;
16141
16142 alignIndex = gopsToAlignWith.length - 1;
16143 gopIndex = gops.length - 1;
16144 alignEndIndex = null;
16145 matchFound = false;
16146
16147 while (alignIndex >= 0 && gopIndex >= 0) {
16148 align = gopsToAlignWith[alignIndex];
16149 gop = gops[gopIndex];
16150
16151 if (align.pts === gop.pts) {
16152 matchFound = true;
16153 break;
16154 }
16155
16156 if (align.pts > gop.pts) {
16157 alignIndex--;
16158 continue;
16159 }
16160
16161 if (alignIndex === gopsToAlignWith.length - 1) {
16162 // gop.pts is greater than the last alignment candidate. If no match is found
16163 // by the end of this loop, we still want to append gops that come after this
16164 // point
16165 alignEndIndex = gopIndex;
16166 }
16167
16168 gopIndex--;
16169 }
16170
16171 if (!matchFound && alignEndIndex === null) {
16172 return null;
16173 }
16174
16175 var trimIndex;
16176
16177 if (matchFound) {
16178 trimIndex = gopIndex;
16179 } else {
16180 trimIndex = alignEndIndex;
16181 }
16182
16183 if (trimIndex === 0) {
16184 return gops;
16185 }
16186
16187 var alignedGops = gops.slice(trimIndex);
16188 var metadata = alignedGops.reduce(function(total, gop) {
16189 total.byteLength += gop.byteLength;
16190 total.duration += gop.duration;
16191 total.nalCount += gop.nalCount;
16192 return total;
16193 }, { byteLength: 0, duration: 0, nalCount: 0 });
16194
16195 alignedGops.byteLength = metadata.byteLength;
16196 alignedGops.duration = metadata.duration;
16197 alignedGops.nalCount = metadata.nalCount;
16198 alignedGops.pts = alignedGops[0].pts;
16199 alignedGops.dts = alignedGops[0].dts;
16200
16201 return alignedGops;
16202 };
16203
16204 this.alignGopsWith = function(newGopsToAlignWith) {
16205 gopsToAlignWith = newGopsToAlignWith;
16206 };
16207};
16208
16209VideoSegmentStream.prototype = new Stream();
16210
16211/**
16212 * Store information about the start and end of the track and the
16213 * duration for each frame/sample we process in order to calculate
16214 * the baseMediaDecodeTime
16215 */
16216collectDtsInfo = function(track, data) {
16217 if (typeof data.pts === 'number') {
16218 if (track.timelineStartInfo.pts === undefined) {
16219 track.timelineStartInfo.pts = data.pts;
16220 }
16221
16222 if (track.minSegmentPts === undefined) {
16223 track.minSegmentPts = data.pts;
16224 } else {
16225 track.minSegmentPts = Math.min(track.minSegmentPts, data.pts);
16226 }
16227
16228 if (track.maxSegmentPts === undefined) {
16229 track.maxSegmentPts = data.pts;
16230 } else {
16231 track.maxSegmentPts = Math.max(track.maxSegmentPts, data.pts);
16232 }
16233 }
16234
16235 if (typeof data.dts === 'number') {
16236 if (track.timelineStartInfo.dts === undefined) {
16237 track.timelineStartInfo.dts = data.dts;
16238 }
16239
16240 if (track.minSegmentDts === undefined) {
16241 track.minSegmentDts = data.dts;
16242 } else {
16243 track.minSegmentDts = Math.min(track.minSegmentDts, data.dts);
16244 }
16245
16246 if (track.maxSegmentDts === undefined) {
16247 track.maxSegmentDts = data.dts;
16248 } else {
16249 track.maxSegmentDts = Math.max(track.maxSegmentDts, data.dts);
16250 }
16251 }
16252};
16253
16254/**
16255 * Clear values used to calculate the baseMediaDecodeTime between
16256 * tracks
16257 */
16258clearDtsInfo = function(track) {
16259 delete track.minSegmentDts;
16260 delete track.maxSegmentDts;
16261 delete track.minSegmentPts;
16262 delete track.maxSegmentPts;
16263};
16264
16265/**
16266 * Calculate the track's baseMediaDecodeTime based on the earliest
16267 * DTS the transmuxer has ever seen and the minimum DTS for the
16268 * current track
16269 */
16270calculateTrackBaseMediaDecodeTime = function(track) {
16271 var
16272 baseMediaDecodeTime,
16273 scale,
16274 // Calculate the distance, in time, that this segment starts from the start
16275 // of the timeline (earliest time seen since the transmuxer initialized)
16276 timeSinceStartOfTimeline = track.minSegmentDts - track.timelineStartInfo.dts;
16277
16278 // track.timelineStartInfo.baseMediaDecodeTime is the location, in time, where
16279 // we want the start of the first segment to be placed
16280 baseMediaDecodeTime = track.timelineStartInfo.baseMediaDecodeTime;
16281
16282 // Add to that the distance this segment is from the very first
16283 baseMediaDecodeTime += timeSinceStartOfTimeline;
16284
16285 // baseMediaDecodeTime must not become negative
16286 baseMediaDecodeTime = Math.max(0, baseMediaDecodeTime);
16287
16288 if (track.type === 'audio') {
16289 // Audio has a different clock equal to the sampling_rate so we need to
16290 // scale the PTS values into the clock rate of the track
16291 scale = track.samplerate / ONE_SECOND_IN_TS;
16292 baseMediaDecodeTime *= scale;
16293 baseMediaDecodeTime = Math.floor(baseMediaDecodeTime);
16294 }
16295
16296 return baseMediaDecodeTime;
16297};
16298
16299/**
16300 * A Stream that can combine multiple streams (i.e. audio & video)
16301 * into a single output segment for MSE. Also supports audio-only
16302 * and video-only streams.
16303 */
16304CoalesceStream = function(options, metadataStream) {
16305 // Number of Tracks per output segment
16306 // If greater than 1, we combine multiple
16307 // tracks into a single segment
16308 this.numberOfTracks = 0;
16309 this.metadataStream = metadataStream;
16310
16311 if (typeof options.remux !== 'undefined') {
16312 this.remuxTracks = !!options.remux;
16313 } else {
16314 this.remuxTracks = true;
16315 }
16316
16317 this.pendingTracks = [];
16318 this.videoTrack = null;
16319 this.pendingBoxes = [];
16320 this.pendingCaptions = [];
16321 this.pendingMetadata = [];
16322 this.pendingBytes = 0;
16323 this.emittedTracks = 0;
16324
16325 CoalesceStream.prototype.init.call(this);
16326
16327 // Take output from the multiple upstream streams and buffer it until flush
16328 this.push = function(output) {
16329 // buffer incoming captions until the associated video segment
16330 // finishes
16331 if (output.text) {
16332 return this.pendingCaptions.push(output);
16333 }
16334 // buffer incoming id3 tags until the final flush
16335 if (output.frames) {
16336 return this.pendingMetadata.push(output);
16337 }
16338
16339 // Add this track to the list of pending tracks and store
16340 // important information required for the construction of
16341 // the final segment
16342 this.pendingTracks.push(output.track);
16343 this.pendingBoxes.push(output.boxes);
16344 this.pendingBytes += output.boxes.byteLength;
16345
16346 if (output.track.type === 'video') {
16347 this.videoTrack = output.track;
16348 }
16349 if (output.track.type === 'audio') {
16350 this.audioTrack = output.track;
16351 }
16352 };
16353};
16354
16355CoalesceStream.prototype = new Stream();
16356CoalesceStream.prototype.flush = function(flushSource) {
16357 var
16358 offset = 0,
16359 event = {
16360 captions: [],
16361 captionStreams: {},
16362 metadata: [],
16363 info: {}
16364 },
16365 caption,
16366 id3,
16367 initSegment,
16368 timelineStartPts = 0,
16369 i;
16370
16371 if (this.pendingTracks.length < this.numberOfTracks) {
16372 if (flushSource !== 'VideoSegmentStream' &&
16373 flushSource !== 'AudioSegmentStream') {
16374 // Return because we haven't received a flush from a data-generating
16375 // portion of the segment (meaning that we have only received metadata
16376 // or captions).
16377 return;
16378 } else if (this.remuxTracks) {
16379 // Return until we have enough tracks from the pipeline to remux (if we
16380 // are remuxing audio and video into a single MP4)
16381 return;
16382 } else if (this.pendingTracks.length === 0) {
16383 // In the case where we receive a flush without any data having been
16384 // received we consider it an emitted track for the purposes of coalescing
16385 // `done` events.
16386 // We do this for the case where there is an audio and video track in the
16387 // segment but no audio data. (seen in several playlists with alternate
16388 // audio tracks and no audio present in the main TS segments.)
16389 this.emittedTracks++;
16390
16391 if (this.emittedTracks >= this.numberOfTracks) {
16392 this.trigger('done');
16393 this.emittedTracks = 0;
16394 }
16395 return;
16396 }
16397 }
16398
16399 if (this.videoTrack) {
16400 timelineStartPts = this.videoTrack.timelineStartInfo.pts;
16401 VIDEO_PROPERTIES.forEach(function(prop) {
16402 event.info[prop] = this.videoTrack[prop];
16403 }, this);
16404 } else if (this.audioTrack) {
16405 timelineStartPts = this.audioTrack.timelineStartInfo.pts;
16406 AUDIO_PROPERTIES.forEach(function(prop) {
16407 event.info[prop] = this.audioTrack[prop];
16408 }, this);
16409 }
16410
16411 if (this.pendingTracks.length === 1) {
16412 event.type = this.pendingTracks[0].type;
16413 } else {
16414 event.type = 'combined';
16415 }
16416
16417 this.emittedTracks += this.pendingTracks.length;
16418
16419 initSegment = mp4.initSegment(this.pendingTracks);
16420
16421 // Create a new typed array to hold the init segment
16422 event.initSegment = new Uint8Array(initSegment.byteLength);
16423
16424 // Create an init segment containing a moov
16425 // and track definitions
16426 event.initSegment.set(initSegment);
16427
16428 // Create a new typed array to hold the moof+mdats
16429 event.data = new Uint8Array(this.pendingBytes);
16430
16431 // Append each moof+mdat (one per track) together
16432 for (i = 0; i < this.pendingBoxes.length; i++) {
16433 event.data.set(this.pendingBoxes[i], offset);
16434 offset += this.pendingBoxes[i].byteLength;
16435 }
16436
16437 // Translate caption PTS times into second offsets into the
16438 // video timeline for the segment, and add track info
16439 for (i = 0; i < this.pendingCaptions.length; i++) {
16440 caption = this.pendingCaptions[i];
16441 caption.startTime = (caption.startPts - timelineStartPts);
16442 caption.startTime /= 90e3;
16443 caption.endTime = (caption.endPts - timelineStartPts);
16444 caption.endTime /= 90e3;
16445 event.captionStreams[caption.stream] = true;
16446 event.captions.push(caption);
16447 }
16448
16449 // Translate ID3 frame PTS times into second offsets into the
16450 // video timeline for the segment
16451 for (i = 0; i < this.pendingMetadata.length; i++) {
16452 id3 = this.pendingMetadata[i];
16453 id3.cueTime = (id3.pts - timelineStartPts);
16454 id3.cueTime /= 90e3;
16455 event.metadata.push(id3);
16456 }
16457 // We add this to every single emitted segment even though we only need
16458 // it for the first
16459 event.metadata.dispatchType = this.metadataStream.dispatchType;
16460
16461 // Reset stream state
16462 this.pendingTracks.length = 0;
16463 this.videoTrack = null;
16464 this.pendingBoxes.length = 0;
16465 this.pendingCaptions.length = 0;
16466 this.pendingBytes = 0;
16467 this.pendingMetadata.length = 0;
16468
16469 // Emit the built segment
16470 this.trigger('data', event);
16471
16472 // Only emit `done` if all tracks have been flushed and emitted
16473 if (this.emittedTracks >= this.numberOfTracks) {
16474 this.trigger('done');
16475 this.emittedTracks = 0;
16476 }
16477};
16478/**
16479 * A Stream that expects MP2T binary data as input and produces
16480 * corresponding media segments, suitable for use with Media Source
16481 * Extension (MSE) implementations that support the ISO BMFF byte
16482 * stream format, like Chrome.
16483 */
16484Transmuxer = function(options) {
16485 var
16486 self = this,
16487 hasFlushed = true,
16488 videoTrack,
16489 audioTrack;
16490
16491 Transmuxer.prototype.init.call(this);
16492
16493 options = options || {};
16494 this.baseMediaDecodeTime = options.baseMediaDecodeTime || 0;
16495 this.transmuxPipeline_ = {};
16496
16497 this.setupAacPipeline = function() {
16498 var pipeline = {};
16499 this.transmuxPipeline_ = pipeline;
16500
16501 pipeline.type = 'aac';
16502 pipeline.metadataStream = new m2ts.MetadataStream();
16503
16504 // set up the parsing pipeline
16505 pipeline.aacStream = new AacStream();
16506 pipeline.audioTimestampRolloverStream = new m2ts.TimestampRolloverStream('audio');
16507 pipeline.timedMetadataTimestampRolloverStream = new m2ts.TimestampRolloverStream('timed-metadata');
16508 pipeline.adtsStream = new AdtsStream();
16509 pipeline.coalesceStream = new CoalesceStream(options, pipeline.metadataStream);
16510 pipeline.headOfPipeline = pipeline.aacStream;
16511
16512 pipeline.aacStream
16513 .pipe(pipeline.audioTimestampRolloverStream)
16514 .pipe(pipeline.adtsStream);
16515 pipeline.aacStream
16516 .pipe(pipeline.timedMetadataTimestampRolloverStream)
16517 .pipe(pipeline.metadataStream)
16518 .pipe(pipeline.coalesceStream);
16519
16520 pipeline.metadataStream.on('timestamp', function(frame) {
16521 pipeline.aacStream.setTimestamp(frame.timeStamp);
16522 });
16523
16524 pipeline.aacStream.on('data', function(data) {
16525 if (data.type === 'timed-metadata' && !pipeline.audioSegmentStream) {
16526 audioTrack = audioTrack || {
16527 timelineStartInfo: {
16528 baseMediaDecodeTime: self.baseMediaDecodeTime
16529 },
16530 codec: 'adts',
16531 type: 'audio'
16532 };
16533 // hook up the audio segment stream to the first track with aac data
16534 pipeline.coalesceStream.numberOfTracks++;
16535 pipeline.audioSegmentStream = new AudioSegmentStream(audioTrack);
16536 // Set up the final part of the audio pipeline
16537 pipeline.adtsStream
16538 .pipe(pipeline.audioSegmentStream)
16539 .pipe(pipeline.coalesceStream);
16540 }
16541 });
16542
16543 // Re-emit any data coming from the coalesce stream to the outside world
16544 pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
16545 // Let the consumer know we have finished flushing the entire pipeline
16546 pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
16547 };
16548
16549 this.setupTsPipeline = function() {
16550 var pipeline = {};
16551 this.transmuxPipeline_ = pipeline;
16552
16553 pipeline.type = 'ts';
16554 pipeline.metadataStream = new m2ts.MetadataStream();
16555
16556 // set up the parsing pipeline
16557 pipeline.packetStream = new m2ts.TransportPacketStream();
16558 pipeline.parseStream = new m2ts.TransportParseStream();
16559 pipeline.elementaryStream = new m2ts.ElementaryStream();
16560 pipeline.videoTimestampRolloverStream = new m2ts.TimestampRolloverStream('video');
16561 pipeline.audioTimestampRolloverStream = new m2ts.TimestampRolloverStream('audio');
16562 pipeline.timedMetadataTimestampRolloverStream = new m2ts.TimestampRolloverStream('timed-metadata');
16563 pipeline.adtsStream = new AdtsStream();
16564 pipeline.h264Stream = new H264Stream();
16565 pipeline.captionStream = new m2ts.CaptionStream();
16566 pipeline.coalesceStream = new CoalesceStream(options, pipeline.metadataStream);
16567 pipeline.headOfPipeline = pipeline.packetStream;
16568
16569 // disassemble MPEG2-TS packets into elementary streams
16570 pipeline.packetStream
16571 .pipe(pipeline.parseStream)
16572 .pipe(pipeline.elementaryStream);
16573
16574 // !!THIS ORDER IS IMPORTANT!!
16575 // demux the streams
16576 pipeline.elementaryStream
16577 .pipe(pipeline.videoTimestampRolloverStream)
16578 .pipe(pipeline.h264Stream);
16579 pipeline.elementaryStream
16580 .pipe(pipeline.audioTimestampRolloverStream)
16581 .pipe(pipeline.adtsStream);
16582
16583 pipeline.elementaryStream
16584 .pipe(pipeline.timedMetadataTimestampRolloverStream)
16585 .pipe(pipeline.metadataStream)
16586 .pipe(pipeline.coalesceStream);
16587
16588 // Hook up CEA-608/708 caption stream
16589 pipeline.h264Stream.pipe(pipeline.captionStream)
16590 .pipe(pipeline.coalesceStream);
16591
16592 pipeline.elementaryStream.on('data', function(data) {
16593 var i;
16594
16595 if (data.type === 'metadata') {
16596 i = data.tracks.length;
16597
16598 // scan the tracks listed in the metadata
16599 while (i--) {
16600 if (!videoTrack && data.tracks[i].type === 'video') {
16601 videoTrack = data.tracks[i];
16602 videoTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
16603 } else if (!audioTrack && data.tracks[i].type === 'audio') {
16604 audioTrack = data.tracks[i];
16605 audioTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
16606 }
16607 }
16608
16609 // hook up the video segment stream to the first track with h264 data
16610 if (videoTrack && !pipeline.videoSegmentStream) {
16611 pipeline.coalesceStream.numberOfTracks++;
16612 pipeline.videoSegmentStream = new VideoSegmentStream(videoTrack, options);
16613
16614 pipeline.videoSegmentStream.on('timelineStartInfo', function(timelineStartInfo) {
16615 // When video emits timelineStartInfo data after a flush, we forward that
16616 // info to the AudioSegmentStream, if it exists, because video timeline
16617 // data takes precedence.
16618 if (audioTrack) {
16619 audioTrack.timelineStartInfo = timelineStartInfo;
16620 // On the first segment we trim AAC frames that exist before the
16621 // very earliest DTS we have seen in video because Chrome will
16622 // interpret any video track with a baseMediaDecodeTime that is
16623 // non-zero as a gap.
16624 pipeline.audioSegmentStream.setEarliestDts(timelineStartInfo.dts);
16625 }
16626 });
16627
16628 pipeline.videoSegmentStream.on('processedGopsInfo',
16629 self.trigger.bind(self, 'gopInfo'));
16630
16631 pipeline.videoSegmentStream.on('baseMediaDecodeTime', function(baseMediaDecodeTime) {
16632 if (audioTrack) {
16633 pipeline.audioSegmentStream.setVideoBaseMediaDecodeTime(baseMediaDecodeTime);
16634 }
16635 });
16636
16637 // Set up the final part of the video pipeline
16638 pipeline.h264Stream
16639 .pipe(pipeline.videoSegmentStream)
16640 .pipe(pipeline.coalesceStream);
16641 }
16642
16643 if (audioTrack && !pipeline.audioSegmentStream) {
16644 // hook up the audio segment stream to the first track with aac data
16645 pipeline.coalesceStream.numberOfTracks++;
16646 pipeline.audioSegmentStream = new AudioSegmentStream(audioTrack);
16647
16648 // Set up the final part of the audio pipeline
16649 pipeline.adtsStream
16650 .pipe(pipeline.audioSegmentStream)
16651 .pipe(pipeline.coalesceStream);
16652 }
16653 }
16654 });
16655
16656 // Re-emit any data coming from the coalesce stream to the outside world
16657 pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
16658 // Let the consumer know we have finished flushing the entire pipeline
16659 pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
16660 };
16661
16662 // hook up the segment streams once track metadata is delivered
16663 this.setBaseMediaDecodeTime = function(baseMediaDecodeTime) {
16664 var pipeline = this.transmuxPipeline_;
16665
16666 this.baseMediaDecodeTime = baseMediaDecodeTime;
16667 if (audioTrack) {
16668 audioTrack.timelineStartInfo.dts = undefined;
16669 audioTrack.timelineStartInfo.pts = undefined;
16670 clearDtsInfo(audioTrack);
16671 audioTrack.timelineStartInfo.baseMediaDecodeTime = baseMediaDecodeTime;
16672 if (pipeline.audioTimestampRolloverStream) {
16673 pipeline.audioTimestampRolloverStream.discontinuity();
16674 }
16675 }
16676 if (videoTrack) {
16677 if (pipeline.videoSegmentStream) {
16678 pipeline.videoSegmentStream.gopCache_ = [];
16679 pipeline.videoTimestampRolloverStream.discontinuity();
16680 }
16681 videoTrack.timelineStartInfo.dts = undefined;
16682 videoTrack.timelineStartInfo.pts = undefined;
16683 clearDtsInfo(videoTrack);
16684 pipeline.captionStream.reset();
16685 videoTrack.timelineStartInfo.baseMediaDecodeTime = baseMediaDecodeTime;
16686 }
16687
16688 if (pipeline.timedMetadataTimestampRolloverStream) {
16689 pipeline.timedMetadataTimestampRolloverStream.discontinuity();
16690 }
16691 };
16692
16693 this.setAudioAppendStart = function(timestamp) {
16694 if (audioTrack) {
16695 this.transmuxPipeline_.audioSegmentStream.setAudioAppendStart(timestamp);
16696 }
16697 };
16698
16699 this.alignGopsWith = function(gopsToAlignWith) {
16700 if (videoTrack && this.transmuxPipeline_.videoSegmentStream) {
16701 this.transmuxPipeline_.videoSegmentStream.alignGopsWith(gopsToAlignWith);
16702 }
16703 };
16704
16705 // feed incoming data to the front of the parsing pipeline
16706 this.push = function(data) {
16707 if (hasFlushed) {
16708 var isAac = isLikelyAacData(data);
16709
16710 if (isAac && this.transmuxPipeline_.type !== 'aac') {
16711 this.setupAacPipeline();
16712 } else if (!isAac && this.transmuxPipeline_.type !== 'ts') {
16713 this.setupTsPipeline();
16714 }
16715 hasFlushed = false;
16716 }
16717 this.transmuxPipeline_.headOfPipeline.push(data);
16718 };
16719
16720 // flush any buffered data
16721 this.flush = function() {
16722 hasFlushed = true;
16723 // Start at the top of the pipeline and flush all pending work
16724 this.transmuxPipeline_.headOfPipeline.flush();
16725 };
16726
16727 // Caption data has to be reset when seeking outside buffered range
16728 this.resetCaptions = function() {
16729 if (this.transmuxPipeline_.captionStream) {
16730 this.transmuxPipeline_.captionStream.reset();
16731 }
16732 };
16733
16734};
16735Transmuxer.prototype = new Stream();
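// Minimal usage sketch (illustrative; `tsBytes` is assumed to be a
// Uint8Array of MPEG2-TS or ADTS/AAC data):
//
//   var transmuxer = new Transmuxer({ remux: true });
//   transmuxer.on('data', function(segment) {
//     // segment.initSegment and segment.data are Uint8Arrays suitable
//     // for appending to an MSE SourceBuffer
//   });
//   transmuxer.push(tsBytes);
//   transmuxer.flush();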
16736
16737module.exports = {
16738 Transmuxer: Transmuxer,
16739 VideoSegmentStream: VideoSegmentStream,
16740 AudioSegmentStream: AudioSegmentStream,
16741 AUDIO_PROPERTIES: AUDIO_PROPERTIES,
16742 VIDEO_PROPERTIES: VIDEO_PROPERTIES
16743};
16744
16745},{"../aac":38,"../codecs/adts.js":40,"../codecs/h264":41,"../data/silence":42,"../m2ts/m2ts.js":50,"../utils/clock":60,"../utils/stream.js":62,"./mp4-generator.js":56}],59:[function(require,module,exports){
16746/**
16747 * mux.js
16748 *
16749 * Copyright (c) 2016 Brightcove
16750 * All rights reserved.
16751 *
16752 * Parse mpeg2 transport stream packets to extract basic timing information
16753 */
16754'use strict';
16755
16756var StreamTypes = require('../m2ts/stream-types.js');
16757var handleRollover = require('../m2ts/timestamp-rollover-stream.js').handleRollover;
16758var probe = {};
16759probe.ts = require('../m2ts/probe.js');
16760probe.aac = require('../aac/probe.js');
16761
16762
16763var
16764 PES_TIMESCALE = 90000,
16765 MP2T_PACKET_LENGTH = 188, // bytes
16766 SYNC_BYTE = 0x47;
16767
16768var isLikelyAacData = function(data) {
16769 if ((data[0] === 'I'.charCodeAt(0)) &&
16770 (data[1] === 'D'.charCodeAt(0)) &&
16771 (data[2] === '3'.charCodeAt(0))) {
16772 return true;
16773 }
16774 return false;
16775};
16776
16777/**
16778 * walks through segment data looking for pat and pmt packets to parse out
16779 * program map table information
16780 */
16781var parsePsi_ = function(bytes, pmt) {
16782 var
16783 startIndex = 0,
16784 endIndex = MP2T_PACKET_LENGTH,
16785 packet, type;
16786
16787 while (endIndex < bytes.byteLength) {
16788 // Look for a pair of start and end sync bytes in the data..
16789 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
16790 // We found a packet
16791 packet = bytes.subarray(startIndex, endIndex);
16792 type = probe.ts.parseType(packet, pmt.pid);
16793
16794 switch (type) {
16795 case 'pat':
16796 if (!pmt.pid) {
16797 pmt.pid = probe.ts.parsePat(packet);
16798 }
16799 break;
16800 case 'pmt':
16801 if (!pmt.table) {
16802 pmt.table = probe.ts.parsePmt(packet);
16803 }
16804 break;
16805 default:
16806 break;
16807 }
16808
16809 // Found the pat and pmt, we can stop walking the segment
16810 if (pmt.pid && pmt.table) {
16811 return;
16812 }
16813
16814 startIndex += MP2T_PACKET_LENGTH;
16815 endIndex += MP2T_PACKET_LENGTH;
16816 continue;
16817 }
16818
16819 // If we get here, we have somehow become de-synchronized and we need to step
16820 // forward one byte at a time until we find a pair of sync bytes that denote
16821 // a packet
16822 startIndex++;
16823 endIndex++;
16824 }
16825};
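// Usage sketch (`segmentBytes` is a hypothetical Uint8Array of TS packets):
// parsePsi_ fills in the supplied accumulator instead of returning a value:
//
//   var pmt = { pid: null, table: null };
//   parsePsi_(segmentBytes, pmt);
//   // pmt.pid is the program map PID from the PAT; pmt.table maps elementary
//   // PIDs to stream types, e.g. { 256: 0x1b, 257: 0x0f }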
16826
16827/**
16828 * walks through the segment data from the start and end to get timing information
16829 * for the first and last audio pes packets
16830 */
16831var parseAudioPes_ = function(bytes, pmt, result) {
16832 var
16833 startIndex = 0,
16834 endIndex = MP2T_PACKET_LENGTH,
16835 packet, type, pesType, pusi, parsed;
16836
16837 var endLoop = false;
16838
16839 // Start walking from start of segment to get first audio packet
16840 while (endIndex < bytes.byteLength) {
16841    // Look for a pair of start and end sync bytes in the data...
16842 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
16843 // We found a packet
16844 packet = bytes.subarray(startIndex, endIndex);
16845 type = probe.ts.parseType(packet, pmt.pid);
16846
16847 switch (type) {
16848 case 'pes':
16849 pesType = probe.ts.parsePesType(packet, pmt.table);
16850 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
16851 if (pesType === 'audio' && pusi) {
16852 parsed = probe.ts.parsePesTime(packet);
16853 if (parsed) {
16854 parsed.type = 'audio';
16855 result.audio.push(parsed);
16856 endLoop = true;
16857 }
16858 }
16859 break;
16860 default:
16861 break;
16862 }
16863
16864 if (endLoop) {
16865 break;
16866 }
16867
16868 startIndex += MP2T_PACKET_LENGTH;
16869 endIndex += MP2T_PACKET_LENGTH;
16870 continue;
16871 }
16872
16873 // If we get here, we have somehow become de-synchronized and we need to step
16874 // forward one byte at a time until we find a pair of sync bytes that denote
16875 // a packet
16876 startIndex++;
16877 endIndex++;
16878 }
16879
16880 // Start walking from end of segment to get last audio packet
16881 endIndex = bytes.byteLength;
16882 startIndex = endIndex - MP2T_PACKET_LENGTH;
16883 endLoop = false;
16884 while (startIndex >= 0) {
16885    // Look for a pair of start and end sync bytes in the data...
16886 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
16887 // We found a packet
16888 packet = bytes.subarray(startIndex, endIndex);
16889 type = probe.ts.parseType(packet, pmt.pid);
16890
16891 switch (type) {
16892 case 'pes':
16893 pesType = probe.ts.parsePesType(packet, pmt.table);
16894 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
16895 if (pesType === 'audio' && pusi) {
16896 parsed = probe.ts.parsePesTime(packet);
16897 if (parsed) {
16898 parsed.type = 'audio';
16899 result.audio.push(parsed);
16900 endLoop = true;
16901 }
16902 }
16903 break;
16904 default:
16905 break;
16906 }
16907
16908 if (endLoop) {
16909 break;
16910 }
16911
16912 startIndex -= MP2T_PACKET_LENGTH;
16913 endIndex -= MP2T_PACKET_LENGTH;
16914 continue;
16915 }
16916
16917 // If we get here, we have somehow become de-synchronized and we need to step
16918    // backward one byte at a time until we find a pair of sync bytes that denote
16919 // a packet
16920 startIndex--;
16921 endIndex--;
16922 }
16923};
16924
16925/**
16926 * walks through the segment data from the start and end to get timing information
16927 * for the first and last video pes packets as well as timing information for the first
16928 * key frame.
16929 */
16930var parseVideoPes_ = function(bytes, pmt, result) {
16931 var
16932 startIndex = 0,
16933 endIndex = MP2T_PACKET_LENGTH,
16934 packet, type, pesType, pusi, parsed, frame, i, pes;
16935
16936 var endLoop = false;
16937
16938 var currentFrame = {
16939 data: [],
16940 size: 0
16941 };
16942
16943 // Start walking from start of segment to get first video packet
16944 while (endIndex < bytes.byteLength) {
16945    // Look for a pair of start and end sync bytes in the data...
16946 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
16947 // We found a packet
16948 packet = bytes.subarray(startIndex, endIndex);
16949 type = probe.ts.parseType(packet, pmt.pid);
16950
16951 switch (type) {
16952 case 'pes':
16953 pesType = probe.ts.parsePesType(packet, pmt.table);
16954 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
16955 if (pesType === 'video') {
16956 if (pusi && !endLoop) {
16957 parsed = probe.ts.parsePesTime(packet);
16958 if (parsed) {
16959 parsed.type = 'video';
16960 result.video.push(parsed);
16961 endLoop = true;
16962 }
16963 }
16964 if (!result.firstKeyFrame) {
16965 if (pusi) {
16966 if (currentFrame.size !== 0) {
16967 frame = new Uint8Array(currentFrame.size);
16968 i = 0;
16969 while (currentFrame.data.length) {
16970 pes = currentFrame.data.shift();
16971 frame.set(pes, i);
16972 i += pes.byteLength;
16973 }
16974 if (probe.ts.videoPacketContainsKeyFrame(frame)) {
16975 result.firstKeyFrame = probe.ts.parsePesTime(frame);
16976 result.firstKeyFrame.type = 'video';
16977 }
16978 currentFrame.size = 0;
16979 }
16980 }
16981 currentFrame.data.push(packet);
16982 currentFrame.size += packet.byteLength;
16983 }
16984 }
16985 break;
16986 default:
16987 break;
16988 }
16989
16990 if (endLoop && result.firstKeyFrame) {
16991 break;
16992 }
16993
16994 startIndex += MP2T_PACKET_LENGTH;
16995 endIndex += MP2T_PACKET_LENGTH;
16996 continue;
16997 }
16998
16999 // If we get here, we have somehow become de-synchronized and we need to step
17000 // forward one byte at a time until we find a pair of sync bytes that denote
17001 // a packet
17002 startIndex++;
17003 endIndex++;
17004 }
17005
17006 // Start walking from end of segment to get last video packet
17007 endIndex = bytes.byteLength;
17008 startIndex = endIndex - MP2T_PACKET_LENGTH;
17009 endLoop = false;
17010 while (startIndex >= 0) {
17011    // Look for a pair of start and end sync bytes in the data...
17012 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
17013 // We found a packet
17014 packet = bytes.subarray(startIndex, endIndex);
17015 type = probe.ts.parseType(packet, pmt.pid);
17016
17017 switch (type) {
17018 case 'pes':
17019 pesType = probe.ts.parsePesType(packet, pmt.table);
17020 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
17021 if (pesType === 'video' && pusi) {
17022 parsed = probe.ts.parsePesTime(packet);
17023 if (parsed) {
17024 parsed.type = 'video';
17025 result.video.push(parsed);
17026 endLoop = true;
17027 }
17028 }
17029 break;
17030 default:
17031 break;
17032 }
17033
17034 if (endLoop) {
17035 break;
17036 }
17037
17038 startIndex -= MP2T_PACKET_LENGTH;
17039 endIndex -= MP2T_PACKET_LENGTH;
17040 continue;
17041 }
17042
17043 // If we get here, we have somehow become de-synchronized and we need to step
17044    // backward one byte at a time until we find a pair of sync bytes that denote
17045 // a packet
17046 startIndex--;
17047 endIndex--;
17048 }
17049};
17050
17051/**
17052 * Adjusts the timestamp information for the segment to account for
17053 * rollover and converts to seconds based on the PES packet timescale (90kHz clock)
17054 */
17055var adjustTimestamp_ = function(segmentInfo, baseTimestamp) {
17056 if (segmentInfo.audio && segmentInfo.audio.length) {
17057 var audioBaseTimestamp = baseTimestamp;
17058 if (typeof audioBaseTimestamp === 'undefined') {
17059 audioBaseTimestamp = segmentInfo.audio[0].dts;
17060 }
17061 segmentInfo.audio.forEach(function(info) {
17062 info.dts = handleRollover(info.dts, audioBaseTimestamp);
17063 info.pts = handleRollover(info.pts, audioBaseTimestamp);
17064 // time in seconds
17065 info.dtsTime = info.dts / PES_TIMESCALE;
17066 info.ptsTime = info.pts / PES_TIMESCALE;
17067 });
17068 }
17069
17070 if (segmentInfo.video && segmentInfo.video.length) {
17071 var videoBaseTimestamp = baseTimestamp;
17072 if (typeof videoBaseTimestamp === 'undefined') {
17073 videoBaseTimestamp = segmentInfo.video[0].dts;
17074 }
17075 segmentInfo.video.forEach(function(info) {
17076 info.dts = handleRollover(info.dts, videoBaseTimestamp);
17077 info.pts = handleRollover(info.pts, videoBaseTimestamp);
17078 // time in seconds
17079 info.dtsTime = info.dts / PES_TIMESCALE;
17080 info.ptsTime = info.pts / PES_TIMESCALE;
17081 });
17082 if (segmentInfo.firstKeyFrame) {
17083 var frame = segmentInfo.firstKeyFrame;
17084 frame.dts = handleRollover(frame.dts, videoBaseTimestamp);
17085 frame.pts = handleRollover(frame.pts, videoBaseTimestamp);
17086 // time in seconds
17087 frame.dtsTime = frame.dts / PES_TIMESCALE;
17088      frame.ptsTime = frame.pts / PES_TIMESCALE;
17089 }
17090 }
17091};
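// Worked example (made-up values): PES timestamps are 33-bit, so a pts of
// 1000 that wrapped past the rollover point is restored relative to a
// baseTimestamp just below 2^33:
//
//   handleRollover(1000, Math.pow(2, 33) - 5000); // 2^33 + 1000 = 8589935592
//
// and the derived ptsTime is 8589935592 / 90000 ~= 95443.73 seconds.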
17092
17093/**
17094 * inspects the aac data stream for start and end time information
17095 */
17096var inspectAac_ = function(bytes) {
17097 var
17098 endLoop = false,
17099 audioCount = 0,
17100 sampleRate = null,
17101 timestamp = null,
17102 frameSize = 0,
17103 byteIndex = 0,
17104 packet;
17105
17106 while (bytes.length - byteIndex >= 3) {
17107 var type = probe.aac.parseType(bytes, byteIndex);
17108 switch (type) {
17109 case 'timed-metadata':
17110 // Exit early because we don't have enough to parse
17111 // the ID3 tag header
17112 if (bytes.length - byteIndex < 10) {
17113 endLoop = true;
17114 break;
17115 }
17116
17117 frameSize = probe.aac.parseId3TagSize(bytes, byteIndex);
17118
17119 // Exit early if we don't have enough in the buffer
17120 // to emit a full packet
17121      if (byteIndex + frameSize > bytes.length) {
17122 endLoop = true;
17123 break;
17124 }
17125 if (timestamp === null) {
17126 packet = bytes.subarray(byteIndex, byteIndex + frameSize);
17127 timestamp = probe.aac.parseAacTimestamp(packet);
17128 }
17129 byteIndex += frameSize;
17130 break;
17131 case 'audio':
17132 // Exit early because we don't have enough to parse
17133 // the ADTS frame header
17134 if (bytes.length - byteIndex < 7) {
17135 endLoop = true;
17136 break;
17137 }
17138
17139 frameSize = probe.aac.parseAdtsSize(bytes, byteIndex);
17140
17141 // Exit early if we don't have enough in the buffer
17142 // to emit a full packet
17143      if (byteIndex + frameSize > bytes.length) {
17144 endLoop = true;
17145 break;
17146 }
17147 if (sampleRate === null) {
17148 packet = bytes.subarray(byteIndex, byteIndex + frameSize);
17149 sampleRate = probe.aac.parseSampleRate(packet);
17150 }
17151 audioCount++;
17152 byteIndex += frameSize;
17153 break;
17154 default:
17155 byteIndex++;
17156 break;
17157 }
17158 if (endLoop) {
17159 return null;
17160 }
17161 }
17162 if (sampleRate === null || timestamp === null) {
17163 return null;
17164 }
17165
17166 var audioTimescale = PES_TIMESCALE / sampleRate;
17167
17168 var result = {
17169 audio: [
17170 {
17171 type: 'audio',
17172 dts: timestamp,
17173 pts: timestamp
17174 },
17175 {
17176 type: 'audio',
17177 dts: timestamp + (audioCount * 1024 * audioTimescale),
17178 pts: timestamp + (audioCount * 1024 * audioTimescale)
17179 }
17180 ]
17181 };
17182
17183 return result;
17184};
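// Note that the end timestamp above is derived rather than parsed: each ADTS
// frame carries 1024 samples, so for a hypothetical 44100Hz stream every
// frame advances the 90kHz clock by 1024 * (90000 / 44100) ~= 2089.8 ticks.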
17185
17186/**
17187 * inspects the transport stream segment data for start and end time information
17188 * of the audio and video tracks (when present) as well as the first key frame's
17189 * start time.
17190 */
17191var inspectTs_ = function(bytes) {
17192 var pmt = {
17193 pid: null,
17194 table: null
17195 };
17196
17197 var result = {};
17198
17199 parsePsi_(bytes, pmt);
17200
17201 for (var pid in pmt.table) {
17202 if (pmt.table.hasOwnProperty(pid)) {
17203 var type = pmt.table[pid];
17204 switch (type) {
17205 case StreamTypes.H264_STREAM_TYPE:
17206 result.video = [];
17207 parseVideoPes_(bytes, pmt, result);
17208 if (result.video.length === 0) {
17209 delete result.video;
17210 }
17211 break;
17212 case StreamTypes.ADTS_STREAM_TYPE:
17213 result.audio = [];
17214 parseAudioPes_(bytes, pmt, result);
17215 if (result.audio.length === 0) {
17216 delete result.audio;
17217 }
17218 break;
17219 default:
17220 break;
17221 }
17222 }
17223 }
17224 return result;
17225};
17226
17227/**
17228 * Inspects segment byte data and returns an object with start and end timing information
17229 *
17230 * @param {Uint8Array} bytes The segment byte data
17231 * @param {Number} baseTimestamp Relative reference timestamp used when adjusting frame
17232 * timestamps for rollover. This value must be expressed in the 90kHz clock.
17233 * @return {Object} Object containing start and end frame timing info of segment.
17234 */
17235var inspect = function(bytes, baseTimestamp) {
17236 var isAacData = isLikelyAacData(bytes);
17237
17238 var result;
17239
17240 if (isAacData) {
17241 result = inspectAac_(bytes);
17242 } else {
17243 result = inspectTs_(bytes);
17244 }
17245
17246 if (!result || (!result.audio && !result.video)) {
17247 return null;
17248 }
17249
17250 adjustTimestamp_(result, baseTimestamp);
17251
17252 return result;
17253};
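// Usage sketch (`segmentBytes` and `lastPts` are hypothetical):
//
//   var info = inspect(segmentBytes, lastPts);
//   if (info && info.video) {
//     var start = info.video[0].ptsTime;
//     var end = info.video[info.video.length - 1].ptsTime;
//     // start/end bound the segment on the media timeline, in seconds
//   }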
17254
17255module.exports = {
17256 inspect: inspect
17257};
17258
17259},{"../aac/probe.js":39,"../m2ts/probe.js":52,"../m2ts/stream-types.js":53,"../m2ts/timestamp-rollover-stream.js":54}],60:[function(require,module,exports){
17260var
17261 ONE_SECOND_IN_TS = 90000, // 90kHz clock
17262 secondsToVideoTs,
17263 secondsToAudioTs,
17264 videoTsToSeconds,
17265 audioTsToSeconds,
17266 audioTsToVideoTs,
17267 videoTsToAudioTs;
17268
17269secondsToVideoTs = function(seconds) {
17270 return seconds * ONE_SECOND_IN_TS;
17271};
17272
17273secondsToAudioTs = function(seconds, sampleRate) {
17274 return seconds * sampleRate;
17275};
17276
17277videoTsToSeconds = function(timestamp) {
17278 return timestamp / ONE_SECOND_IN_TS;
17279};
17280
17281audioTsToSeconds = function(timestamp, sampleRate) {
17282 return timestamp / sampleRate;
17283};
17284
17285audioTsToVideoTs = function(timestamp, sampleRate) {
17286 return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
17287};
17288
17289videoTsToAudioTs = function(timestamp, sampleRate) {
17290 return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
17291};
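// Worked example: one second of media is 90000 video ticks and, at a 48kHz
// sample rate, 48000 audio ticks, so:
//
//   secondsToVideoTs(1); // 90000
//   audioTsToVideoTs(48000, 48000); // 90000
//   videoTsToAudioTs(90000, 48000); // 48000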
17292
17293module.exports = {
17294 secondsToVideoTs: secondsToVideoTs,
17295 secondsToAudioTs: secondsToAudioTs,
17296 videoTsToSeconds: videoTsToSeconds,
17297 audioTsToSeconds: audioTsToSeconds,
17298 audioTsToVideoTs: audioTsToVideoTs,
17299 videoTsToAudioTs: videoTsToAudioTs
17300};
17301
17302},{}],61:[function(require,module,exports){
17303'use strict';
17304
17305var ExpGolomb;
17306
17307/**
17308 * Parser for exponential Golomb codes, a variable-bitwidth number encoding
17309 * scheme used by h264.
17310 */
17311ExpGolomb = function(workingData) {
17312 var
17313 // the number of bytes left to examine in workingData
17314 workingBytesAvailable = workingData.byteLength,
17315
17316 // the current word being examined
17317 workingWord = 0, // :uint
17318
17319 // the number of bits left to examine in the current word
17320 workingBitsAvailable = 0; // :uint;
17321
17322 // ():uint
17323 this.length = function() {
17324 return (8 * workingBytesAvailable);
17325 };
17326
17327 // ():uint
17328 this.bitsAvailable = function() {
17329 return (8 * workingBytesAvailable) + workingBitsAvailable;
17330 };
17331
17332 // ():void
17333 this.loadWord = function() {
17334 var
17335 position = workingData.byteLength - workingBytesAvailable,
17336 workingBytes = new Uint8Array(4),
17337 availableBytes = Math.min(4, workingBytesAvailable);
17338
17339 if (availableBytes === 0) {
17340 throw new Error('no bytes available');
17341 }
17342
17343 workingBytes.set(workingData.subarray(position,
17344 position + availableBytes));
17345 workingWord = new DataView(workingBytes.buffer).getUint32(0);
17346
17347 // track the amount of workingData that has been processed
17348 workingBitsAvailable = availableBytes * 8;
17349 workingBytesAvailable -= availableBytes;
17350 };
17351
17352 // (count:int):void
17353 this.skipBits = function(count) {
17354 var skipBytes; // :int
17355 if (workingBitsAvailable > count) {
17356 workingWord <<= count;
17357 workingBitsAvailable -= count;
17358 } else {
17359 count -= workingBitsAvailable;
17360 skipBytes = Math.floor(count / 8);
17361
17362 count -= (skipBytes * 8);
17363 workingBytesAvailable -= skipBytes;
17364
17365 this.loadWord();
17366
17367 workingWord <<= count;
17368 workingBitsAvailable -= count;
17369 }
17370 };
17371
17372 // (size:int):uint
17373 this.readBits = function(size) {
17374 var
17375 bits = Math.min(workingBitsAvailable, size), // :uint
17376 valu = workingWord >>> (32 - bits); // :uint
17377    // NOTE: sizes greater than 31 are not supported here; they would overflow the signed 32-bit operations below
17378 workingBitsAvailable -= bits;
17379 if (workingBitsAvailable > 0) {
17380 workingWord <<= bits;
17381 } else if (workingBytesAvailable > 0) {
17382 this.loadWord();
17383 }
17384
17385 bits = size - bits;
17386 if (bits > 0) {
17387 return valu << bits | this.readBits(bits);
17388 }
17389 return valu;
17390 };
17391
17392 // ():uint
17393 this.skipLeadingZeros = function() {
17394 var leadingZeroCount; // :uint
17395 for (leadingZeroCount = 0; leadingZeroCount < workingBitsAvailable; ++leadingZeroCount) {
17396 if ((workingWord & (0x80000000 >>> leadingZeroCount)) !== 0) {
17397 // the first bit of working word is 1
17398 workingWord <<= leadingZeroCount;
17399 workingBitsAvailable -= leadingZeroCount;
17400 return leadingZeroCount;
17401 }
17402 }
17403
17404 // we exhausted workingWord and still have not found a 1
17405 this.loadWord();
17406 return leadingZeroCount + this.skipLeadingZeros();
17407 };
17408
17409 // ():void
17410 this.skipUnsignedExpGolomb = function() {
17411 this.skipBits(1 + this.skipLeadingZeros());
17412 };
17413
17414 // ():void
17415 this.skipExpGolomb = function() {
17416 this.skipBits(1 + this.skipLeadingZeros());
17417 };
17418
17419 // ():uint
17420 this.readUnsignedExpGolomb = function() {
17421 var clz = this.skipLeadingZeros(); // :uint
17422 return this.readBits(clz + 1) - 1;
17423 };
17424
17425 // ():int
17426 this.readExpGolomb = function() {
17427 var valu = this.readUnsignedExpGolomb(); // :int
17428 if (0x01 & valu) {
17429 // the number is odd if the low order bit is set
17430 return (1 + valu) >>> 1; // add 1 to make it even, and divide by 2
17431 }
17432 return -1 * (valu >>> 1); // divide by two then make it negative
17433 };
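  // Worked example: for the bit string 00111, skipLeadingZeros() consumes two
  // zeros (clz = 2), readBits(3) then reads 0b111 = 7, so
  // readUnsignedExpGolomb() returns 7 - 1 = 6; 6 is even, so
  // readExpGolomb() maps it to -(6 >>> 1) = -3.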
17434
17435 // Some convenience functions
17436 // :Boolean
17437 this.readBoolean = function() {
17438 return this.readBits(1) === 1;
17439 };
17440
17441 // ():int
17442 this.readUnsignedByte = function() {
17443 return this.readBits(8);
17444 };
17445
17446 this.loadWord();
17447};
17448
17449module.exports = ExpGolomb;
17450
17451},{}],62:[function(require,module,exports){
17452/**
17453 * mux.js
17454 *
17455 * Copyright (c) 2014 Brightcove
17456 * All rights reserved.
17457 *
17458 * A lightweight readable stream implementation that handles event dispatching.
17459 * Objects that inherit from streams should call init in their constructors.
17460 */
17461'use strict';
17462
17463var Stream = function() {
17464 this.init = function() {
17465 var listeners = {};
17466 /**
17467 * Add a listener for a specified event type.
17468 * @param type {string} the event name
17469 * @param listener {function} the callback to be invoked when an event of
17470 * the specified type occurs
17471 */
17472 this.on = function(type, listener) {
17473 if (!listeners[type]) {
17474 listeners[type] = [];
17475 }
17476 listeners[type] = listeners[type].concat(listener);
17477 };
17478 /**
17479 * Remove a listener for a specified event type.
17480 * @param type {string} the event name
17481 * @param listener {function} a function previously registered for this
17482 * type of event through `on`
17483 */
17484 this.off = function(type, listener) {
17485 var index;
17486 if (!listeners[type]) {
17487 return false;
17488 }
17489 index = listeners[type].indexOf(listener);
17490 listeners[type] = listeners[type].slice();
17491      if (index > -1) { listeners[type].splice(index, 1); } // guard: splice(-1, 1) would drop the last listener
17492 return index > -1;
17493 };
17494 /**
17495 * Trigger an event of the specified type on this stream. Any additional
17496 * arguments to this function are passed as parameters to event listeners.
17497 * @param type {string} the event name
17498 */
17499 this.trigger = function(type) {
17500 var callbacks, i, length, args;
17501 callbacks = listeners[type];
17502 if (!callbacks) {
17503 return;
17504 }
17505 // Slicing the arguments on every invocation of this method
17506 // can add a significant amount of overhead. Avoid the
17507 // intermediate object creation for the common case of a
17508 // single callback argument
17509 if (arguments.length === 2) {
17510 length = callbacks.length;
17511 for (i = 0; i < length; ++i) {
17512 callbacks[i].call(this, arguments[1]);
17513 }
17514 } else {
17515 args = [];
17517 for (i = 1; i < arguments.length; ++i) {
17518 args.push(arguments[i]);
17519 }
17520 length = callbacks.length;
17521 for (i = 0; i < length; ++i) {
17522 callbacks[i].apply(this, args);
17523 }
17524 }
17525 };
17526 /**
17527 * Destroys the stream and cleans up.
17528 */
17529 this.dispose = function() {
17530 listeners = {};
17531 };
17532 };
17533};
17534
17535/**
17536 * Forwards all `data` events on this stream to the destination stream. The
17537 * destination stream should provide a method `push` to receive the data
17538 * events as they arrive.
17539 * @param destination {stream} the stream that will receive all `data` events
17542 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
17543 */
17544Stream.prototype.pipe = function(destination) {
17545 this.on('data', function(data) {
17546 destination.push(data);
17547 });
17548
17549 this.on('done', function(flushSource) {
17550 destination.flush(flushSource);
17551 });
17552
17553 return destination;
17554};
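// Example (hypothetical stream names): pipe returns its destination, so
// pipelines chain left to right:
//
//   parser.pipe(transmuxer).pipe(segmenter);
//   // 'data' events flow parser -> transmuxer -> segmenter, and a 'done'
//   // event on parser flushes each downstream stage in turn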
17555
17556// Default stream functions that are expected to be overridden to perform
17557// actual work. These are provided by the prototype as a sort of no-op
17558// implementation so that we don't have to check for their existence in the
17559// `pipe` function above.
17560Stream.prototype.push = function(data) {
17561 this.trigger('data', data);
17562};
17563
17564Stream.prototype.flush = function(flushSource) {
17565 this.trigger('done', flushSource);
17566};
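// Usage sketch: a minimal subclass built only on what the base class above
// provides:
//
//   var Doubler = function() {
//     this.init(); // wires up on/off/trigger for this instance
//   };
//   Doubler.prototype = new Stream();
//   Doubler.prototype.push = function(data) {
//     this.trigger('data', data * 2);
//   };
//
//   var doubler = new Doubler();
//   doubler.on('data', function(n) { /* n === 42 */ });
//   doubler.push(21);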
17567
17568module.exports = Stream;
17569
17570},{}],63:[function(require,module,exports){
17571/* jshint ignore:start */
17572(function(root) {
17573/* jshint ignore:end */
17574 var URLToolkit = {
17575 // build an absolute URL from a relative one using the provided baseURL
17576 // if relativeURL is an absolute URL it will be returned as is.
17577 buildAbsoluteURL: function(baseURL, relativeURL) {
17578 // remove any remaining space and CRLF
17579 relativeURL = relativeURL.trim();
17580 if (/^[a-z]+:/i.test(relativeURL)) {
17581 // complete url, not relative
17582 return relativeURL;
17583 }
17584
17585 var relativeURLQuery = null;
17586 var relativeURLHash = null;
17587
17588 var relativeURLHashSplit = /^([^#]*)(.*)$/.exec(relativeURL);
17589 if (relativeURLHashSplit) {
17590 relativeURLHash = relativeURLHashSplit[2];
17591 relativeURL = relativeURLHashSplit[1];
17592 }
17593 var relativeURLQuerySplit = /^([^\?]*)(.*)$/.exec(relativeURL);
17594 if (relativeURLQuerySplit) {
17595 relativeURLQuery = relativeURLQuerySplit[2];
17596 relativeURL = relativeURLQuerySplit[1];
17597 }
17598
17599 var baseURLHashSplit = /^([^#]*)(.*)$/.exec(baseURL);
17600 if (baseURLHashSplit) {
17601 baseURL = baseURLHashSplit[1];
17602 }
17603 var baseURLQuerySplit = /^([^\?]*)(.*)$/.exec(baseURL);
17604 if (baseURLQuerySplit) {
17605 baseURL = baseURLQuerySplit[1];
17606 }
17607
17608 var baseURLDomainSplit = /^(([a-z]+:)?\/\/[^:\/]+(:[0-9]+)?)?(\/?.*)$/i.exec(baseURL);
17609 if (!baseURLDomainSplit) {
17610 throw new Error('Error trying to parse base URL.');
17611 }
17612
17613 // e.g. 'http:', 'https:', ''
17614 var baseURLProtocol = baseURLDomainSplit[2] || '';
17615 // e.g. 'http://example.com', '//example.com', ''
17616 var baseURLProtocolDomain = baseURLDomainSplit[1] || '';
17617 // e.g. '/a/b/c/playlist.m3u8', 'a/b/c/playlist.m3u8'
17618 var baseURLPath = baseURLDomainSplit[4];
17619 if (baseURLPath.indexOf('/') !== 0 && baseURLProtocolDomain !== '') {
17620 // this handles a base url of http://example.com (missing last slash)
17621 baseURLPath = '/'+baseURLPath;
17622 }
17623
17624 var builtURL = null;
17625 if (/^\/\//.test(relativeURL)) {
17626      // relative url starts with '//' so copy protocol (which may be '' if baseUrl didn't provide one)
17627 builtURL = baseURLProtocol+'//'+URLToolkit.buildAbsolutePath('', relativeURL.substring(2));
17628 }
17629 else if (/^\//.test(relativeURL)) {
17630 // relative url starts with '/' so start from root of domain
17631 builtURL = baseURLProtocolDomain+'/'+URLToolkit.buildAbsolutePath('', relativeURL.substring(1));
17632 }
17633 else {
17634 builtURL = URLToolkit.buildAbsolutePath(baseURLProtocolDomain+baseURLPath, relativeURL);
17635 }
17636
17637 // put the query and hash parts back
17638 if (relativeURLQuery) {
17639 builtURL += relativeURLQuery;
17640 }
17641 if (relativeURLHash) {
17642 builtURL += relativeURLHash;
17643 }
17644 return builtURL;
17645 },
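    // Examples (illustrative URLs):
    //
    //   URLToolkit.buildAbsoluteURL('http://example.com/a/b/playlist.m3u8', 'seg.ts')
    //   // => 'http://example.com/a/b/seg.ts'
    //   URLToolkit.buildAbsoluteURL('http://example.com/a/b/', '/seg.ts')
    //   // => 'http://example.com/seg.ts'
    //   URLToolkit.buildAbsoluteURL('http://example.com/a/b/', '../c/seg.ts')
    //   // => 'http://example.com/a/c/seg.ts'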
17646
17647 // build an absolute path using the provided basePath
17648 // adapted from https://developer.mozilla.org/en-US/docs/Web/API/document/cookie#Using_relative_URLs_in_the_path_parameter
17649 // this does not handle the case where relativePath is "/" or "//". These cases should be handled outside this.
17650 buildAbsolutePath: function(basePath, relativePath) {
17651 var sRelPath = relativePath;
17652 var nUpLn, sDir = '', sPath = basePath.replace(/[^\/]*$/, sRelPath.replace(/(\/|^)(?:\.?\/+)+/g, '$1'));
17653 for (var nEnd, nStart = 0; nEnd = sPath.indexOf('/../', nStart), nEnd > -1; nStart = nEnd + nUpLn) {
17654 nUpLn = /^\/(?:\.\.\/)*/.exec(sPath.slice(nEnd))[0].length;
17655 sDir = (sDir + sPath.substring(nStart, nEnd)).replace(new RegExp('(?:\\\/+[^\\\/]*){0,' + ((nUpLn - 1) / 3) + '}$'), '/');
17656 }
17657 return sDir + sPath.substr(nStart);
17658 }
17659 };
17660
17661/* jshint ignore:start */
17662 if(typeof exports === 'object' && typeof module === 'object')
17663 module.exports = URLToolkit;
17664 else if(typeof define === 'function' && define.amd)
17665 define([], function() { return URLToolkit; });
17666 else if(typeof exports === 'object')
17667 exports["URLToolkit"] = URLToolkit;
17668 else
17669 root["URLToolkit"] = URLToolkit;
17670})(this);
17671/* jshint ignore:end */
17672
17673},{}],64:[function(require,module,exports){
17674(function (global){
17675/**
17676 * @file add-text-track-data.js
17677 */
17678'use strict';
17679
17680Object.defineProperty(exports, '__esModule', {
17681 value: true
17682});
17683
17684function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
17685
17686var _globalWindow = require('global/window');
17687
17688var _globalWindow2 = _interopRequireDefault(_globalWindow);
17689
17690var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
17691
17692var _videoJs2 = _interopRequireDefault(_videoJs);
17693
17694/**
17695 * Define properties on a cue for backwards compatibility,
17696 * but warn the user that the way they are using it
17697 * is deprecated and will be removed at a later date.
17698 *
17699 * @param {Cue} cue the cue to add the properties on
17700 * @private
17701 */
17702var deprecateOldCue = function deprecateOldCue(cue) {
17703 Object.defineProperties(cue.frame, {
17704 id: {
17705 get: function get() {
17706 _videoJs2['default'].log.warn('cue.frame.id is deprecated. Use cue.value.key instead.');
17707 return cue.value.key;
17708 }
17709 },
17710 value: {
17711 get: function get() {
17712 _videoJs2['default'].log.warn('cue.frame.value is deprecated. Use cue.value.data instead.');
17713 return cue.value.data;
17714 }
17715 },
17716 privateData: {
17717 get: function get() {
17718 _videoJs2['default'].log.warn('cue.frame.privateData is deprecated. Use cue.value.data instead.');
17719 return cue.value.data;
17720 }
17721 }
17722 });
17723};
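// Example (sketch): given a frame object { key: 'TXXX', data: 'cue text' }
// attached below as both cue.frame and cue.value, reading the legacy
// property cue.frame.id logs a deprecation warning and returns 'TXXX'.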
17724
17725var durationOfVideo = function durationOfVideo(duration) {
17726 var dur = undefined;
17727
17728 if (isNaN(duration) || Math.abs(duration) === Infinity) {
17729 dur = Number.MAX_VALUE;
17730 } else {
17731 dur = duration;
17732 }
17733 return dur;
17734};
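// Examples: durationOfVideo(Infinity) and durationOfVideo(NaN) both return
// Number.MAX_VALUE as a cue-friendly stand-in for "unbounded", while finite
// values pass through unchanged: durationOfVideo(61.5) === 61.5.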
17735/**
17736 * Add text track data to a source handler given the captions and
17737 * metadata from the buffer.
17738 *
17739 * @param {Object} sourceHandler the flash or virtual source buffer
17740 * @param {Array} captionArray an array of caption data
17741 * @param {Array} metadataArray an array of meta data
17742 * @private
17743 */
17744var addTextTrackData = function addTextTrackData(sourceHandler, captionArray, metadataArray) {
17745 var Cue = _globalWindow2['default'].WebKitDataCue || _globalWindow2['default'].VTTCue;
17746
17747 if (captionArray) {
17748 captionArray.forEach(function (caption) {
17749 var track = caption.stream;
17750
17751 this.inbandTextTracks_[track].addCue(new Cue(caption.startTime + this.timestampOffset, caption.endTime + this.timestampOffset, caption.text));
17752 }, sourceHandler);
17753 }
17754
17755 if (metadataArray) {
17756 (function () {
17757 var videoDuration = durationOfVideo(sourceHandler.mediaSource_.duration);
17758
17759 metadataArray.forEach(function (metadata) {
17760 var time = metadata.cueTime + this.timestampOffset;
17761
17762 metadata.frames.forEach(function (frame) {
17763 var cue = new Cue(time, time, frame.value || frame.url || frame.data || '');
17764
17765 cue.frame = frame;
17766 cue.value = frame;
17767 deprecateOldCue(cue);
17768
17769 this.metadataTrack_.addCue(cue);
17770 }, this);
17771 }, sourceHandler);
17772
17773      // Updating the metadata cues so that
17774      // the endTime of each cue is the startTime of the next cue and
17775      // the endTime of the last cue is the duration of the video
17776 if (sourceHandler.metadataTrack_ && sourceHandler.metadataTrack_.cues && sourceHandler.metadataTrack_.cues.length) {
17777 (function () {
17778 var cues = sourceHandler.metadataTrack_.cues;
17779 var cuesArray = [];
17780
17781 // Create a copy of the TextTrackCueList...
17782 // ...disregarding cues with a falsey value
17783 for (var i = 0; i < cues.length; i++) {
17784 if (cues[i]) {
17785 cuesArray.push(cues[i]);
17786 }
17787 }
17788
17789 // Group cues by their startTime value
17790 var cuesGroupedByStartTime = cuesArray.reduce(function (obj, cue) {
17791 var timeSlot = obj[cue.startTime] || [];
17792
17793 timeSlot.push(cue);
17794 obj[cue.startTime] = timeSlot;
17795
17796 return obj;
17797 }, {});
17798
17799 // Sort startTimes by ascending order
17800 var sortedStartTimes = Object.keys(cuesGroupedByStartTime).sort(function (a, b) {
17801 return Number(a) - Number(b);
17802 });
17803
17804 // Map each cue group's endTime to the next group's startTime
17805 sortedStartTimes.forEach(function (startTime, idx) {
17806 var cueGroup = cuesGroupedByStartTime[startTime];
17807 var nextTime = Number(sortedStartTimes[idx + 1]) || videoDuration;
17808
17809          // Map each cue's endTime to the next group's startTime
17810 cueGroup.forEach(function (cue) {
17811 cue.endTime = nextTime;
17812 });
17813 });
17814 })();
17815 }
17816 })();
17817 }
17818};
17819
17820exports['default'] = {
17821 addTextTrackData: addTextTrackData,
17822 durationOfVideo: durationOfVideo
17823};
17824module.exports = exports['default'];
17825}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
17826},{"global/window":32}],65:[function(require,module,exports){
17827/**
17828 * @file codec-utils.js
17829 */
17830
17831/**
17832 * Check if a codec string refers to an audio codec.
17833 *
17834 * @param {String} codec codec string to check
17835 * @return {Boolean} if this is an audio codec
17836 * @private
17837 */
17838'use strict';
17839
17840Object.defineProperty(exports, '__esModule', {
17841 value: true
17842});
17843var isAudioCodec = function isAudioCodec(codec) {
17844  return (/mp4a\.\d+\.\d+/i.test(codec)
17845 );
17846};
17847
17848/**
17849 * Check if a codec string refers to a video codec.
17850 *
17851 * @param {String} codec codec string to check
17852 * @return {Boolean} if this is a video codec
17853 * @private
17854 */
17855var isVideoCodec = function isVideoCodec(codec) {
17856 return (/avc1\.[\da-f]+/i.test(codec)
17857 );
17858};
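// Examples: isAudioCodec('mp4a.40.2') === true and
// isVideoCodec('avc1.64001f') === true; each returns false for the other
// family's codec strings.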
17859
17860/**
17861 * Parse a content type header into a type and parameters
17862 * object
17863 *
17864 * @param {String} type the content type header
17865 * @return {Object} the parsed content-type
17866 * @private
17867 */
17868var parseContentType = function parseContentType(type) {
17869 var object = { type: '', parameters: {} };
17870 var parameters = type.trim().split(';');
17871
17872 // first parameter should always be content-type
17873 object.type = parameters.shift().trim();
17874 parameters.forEach(function (parameter) {
17875 var pair = parameter.trim().split('=');
17876
17877 if (pair.length > 1) {
17878 var _name = pair[0].replace(/"/g, '').trim();
17879 var value = pair[1].replace(/"/g, '').trim();
17880
17881 object.parameters[_name] = value;
17882 }
17883 });
17884
17885 return object;
17886};
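// Example: parseContentType('video/mp2t; codecs="avc1.64001f, mp4a.40.5"')
// returns { type: 'video/mp2t',
//           parameters: { codecs: 'avc1.64001f, mp4a.40.5' } }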
17887
17888/**
17889 * Replace the old apple-style `avc1.<dd>.<dd>` codec string with the standard
17890 * `avc1.<hhhhhh>`
17891 *
17892 * @param {Array} codecs an array of codec strings to fix
17893 * @return {Array} the translated codec array
17894 * @private
17895 */
17896var translateLegacyCodecs = function translateLegacyCodecs(codecs) {
17897 return codecs.map(function (codec) {
17898 return codec.replace(/avc1\.(\d+)\.(\d+)/i, function (orig, profile, avcLevel) {
17899 var profileHex = ('00' + Number(profile).toString(16)).slice(-2);
17900 var avcLevelHex = ('00' + Number(avcLevel).toString(16)).slice(-2);
17901
17902 return 'avc1.' + profileHex + '00' + avcLevelHex;
17903 });
17904 });
17905};
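// Example: translateLegacyCodecs(['avc1.100.31', 'mp4a.40.2']) returns
// ['avc1.64001f', 'mp4a.40.2'] -- profile 100 and level 31 are rewritten as
// the hex pair 64/1f, while non-legacy strings pass through untouched.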
17906
17907exports['default'] = {
17908 isAudioCodec: isAudioCodec,
17909 parseContentType: parseContentType,
17910 isVideoCodec: isVideoCodec,
17911 translateLegacyCodecs: translateLegacyCodecs
17912};
17913module.exports = exports['default'];
17914},{}],66:[function(require,module,exports){
17915/**
17916 * @file create-text-tracks-if-necessary.js
17917 */
17918
17919/**
17920 * Create text tracks on video.js if they exist on a segment.
17921 *
17922 * @param {Object} sourceBuffer the VSB or FSB
17923 * @param {Object} mediaSource the HTML or Flash media source
17924 * @param {Object} segment the segment that may contain the text track
17925 * @private
17926 */
17927'use strict';
17928
17929Object.defineProperty(exports, '__esModule', {
17930 value: true
17931});
17932var createTextTracksIfNecessary = function createTextTracksIfNecessary(sourceBuffer, mediaSource, segment) {
17933 var player = mediaSource.player_;
17934
17935 // create an in-band caption track if one is present in the segment
17936 if (segment.captions && segment.captions.length) {
17937 if (!sourceBuffer.inbandTextTracks_) {
17938 sourceBuffer.inbandTextTracks_ = {};
17939 }
17940
17941 for (var trackId in segment.captionStreams) {
17942 if (!sourceBuffer.inbandTextTracks_[trackId]) {
17943 player.tech_.trigger({ type: 'usage', name: 'hls-608' });
17944 var track = player.textTracks().getTrackById(trackId);
17945
17946 if (track) {
17947            // Reuse an existing track with a CC# id because this was
17948 // very likely created by videojs-contrib-hls from information
17949 // in the m3u8 for us to use
17950 sourceBuffer.inbandTextTracks_[trackId] = track;
17951 } else {
17952 // Otherwise, create a track with the default `CC#` label and
17953 // without a language
17954 sourceBuffer.inbandTextTracks_[trackId] = player.addRemoteTextTrack({
17955 kind: 'captions',
17956 id: trackId,
17957 label: trackId
17958 }, false).track;
17959 }
17960 }
17961 }
17962 }
17963
17964 if (segment.metadata && segment.metadata.length && !sourceBuffer.metadataTrack_) {
17965 sourceBuffer.metadataTrack_ = player.addRemoteTextTrack({
17966 kind: 'metadata',
17967 label: 'Timed Metadata'
17968 }, false).track;
17969 sourceBuffer.metadataTrack_.inBandMetadataTrackDispatchType = segment.metadata.dispatchType;
17970 }
17971};
17972
17973exports['default'] = createTextTracksIfNecessary;
17974module.exports = exports['default'];
17975},{}],67:[function(require,module,exports){
17976/**
17977 * @file flash-constants.js
17978 */
17979/**
17980 * The maximum size in bytes for append operations to the video.js
17981 * SWF. Calling through to Flash blocks and can be expensive so
17982 * we chunk data and pass through BYTES_PER_CHUNK (32KB) at a time,
17983 * yielding to the browser between chunks. At one chunk per
17984 * TIME_BETWEEN_CHUNKS (1ms) tick that gives a theoretical maximum of roughly
17985 * 32MB/s into Flash. Any higher and we begin to drop frames and UI responsiveness suffers.
17986 *
17987 * @private
17988 */
17989"use strict";
17990
17991Object.defineProperty(exports, "__esModule", {
17992 value: true
17993});
17994var flashConstants = {
17995 // times in milliseconds
17996 TIME_BETWEEN_CHUNKS: 1,
17997 BYTES_PER_CHUNK: 1024 * 32
17998};
17999
18000exports["default"] = flashConstants;
18001module.exports = exports["default"];
18002},{}],68:[function(require,module,exports){
18003(function (global){
18004/**
18005 * @file flash-media-source.js
18006 */
18007'use strict';
18008
18009Object.defineProperty(exports, '__esModule', {
18010 value: true
18011});
18012
18013var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
18014
18015var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
18016
18017function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
18018
18019function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
18020
18021function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
18022
18023var _globalDocument = require('global/document');
18024
18025var _globalDocument2 = _interopRequireDefault(_globalDocument);
18026
18027var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
18028
18029var _videoJs2 = _interopRequireDefault(_videoJs);
18030
18031var _flashSourceBuffer = require('./flash-source-buffer');
18032
18033var _flashSourceBuffer2 = _interopRequireDefault(_flashSourceBuffer);
18034
18035var _flashConstants = require('./flash-constants');
18036
18037var _flashConstants2 = _interopRequireDefault(_flashConstants);
18038
18039var _codecUtils = require('./codec-utils');
18040
18041/**
18042 * A flash implementation of HTML MediaSources and a polyfill
18043 * for browsers that don't support native or HTML MediaSources.
18044 *
18045 * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource
18046 * @class FlashMediaSource
18047 * @extends videojs.EventTarget
18048 */
18049
18050var FlashMediaSource = (function (_videojs$EventTarget) {
18051 _inherits(FlashMediaSource, _videojs$EventTarget);
18052
18053 function FlashMediaSource() {
18054 var _this = this;
18055
18056 _classCallCheck(this, FlashMediaSource);
18057
18058 _get(Object.getPrototypeOf(FlashMediaSource.prototype), 'constructor', this).call(this);
18059 this.sourceBuffers = [];
18060 this.readyState = 'closed';
18061
18062 this.on(['sourceopen', 'webkitsourceopen'], function (event) {
18063 // find the swf where we will push media data
18064 _this.swfObj = _globalDocument2['default'].getElementById(event.swfId);
18065 _this.player_ = (0, _videoJs2['default'])(_this.swfObj.parentNode);
18066 _this.tech_ = _this.swfObj.tech;
18067 _this.readyState = 'open';
18068
18069 _this.tech_.on('seeking', function () {
18070 var i = _this.sourceBuffers.length;
18071
18072 while (i--) {
18073 _this.sourceBuffers[i].abort();
18074 }
18075 });
18076
18077 // trigger load events
18078 if (_this.swfObj) {
18079 _this.swfObj.vjs_load();
18080 }
18081 });
18082 }
18083
18084 /**
18085 * Set or return the presentation duration.
18086 *
18087 * @param {Double} value the duration of the media in seconds
18088   * @return {Double} the current presentation duration
18089 * @link http://www.w3.org/TR/media-source/#widl-MediaSource-duration
18090 */
18091
18092 /**
18093 * We have this function so that the html and flash interfaces
18094 * are the same.
18095 *
18096 * @private
18097 */
18098
18099 _createClass(FlashMediaSource, [{
18100 key: 'addSeekableRange_',
18101 value: function addSeekableRange_() {}
18102 // intentional no-op
18103
18104 /**
18105 * Create a new flash source buffer and add it to our flash media source.
18106 *
18107 * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/addSourceBuffer
18108 * @param {String} type the content-type of the source
18109 * @return {Object} the flash source buffer
18110 */
18111
18112 }, {
18113 key: 'addSourceBuffer',
18114 value: function addSourceBuffer(type) {
18115 var parsedType = (0, _codecUtils.parseContentType)(type);
18116 var sourceBuffer = undefined;
18117
18118      // if this is an MPEG-TS content type, we'll transmux the data to FLV and push it to flash
18119 if (parsedType.type === 'video/mp2t' || parsedType.type === 'audio/mp2t') {
18120 // Flash source buffers
18121 sourceBuffer = new _flashSourceBuffer2['default'](this);
18122 } else {
18123 throw new Error('NotSupportedError (Video.js)');
18124 }
18125
18126 this.sourceBuffers.push(sourceBuffer);
18127 return sourceBuffer;
18128 }
18129
18130 /**
18131 * Signals the end of the stream.
18132 *
18133 * @link https://w3c.github.io/media-source/#widl-MediaSource-endOfStream-void-EndOfStreamError-error
18134 * @param {String=} error Signals that a playback error
18135 * has occurred. If specified, it must be either "network" or
18136 * "decode".
18137 */
18138 }, {
18139 key: 'endOfStream',
18140 value: function endOfStream(error) {
18141 if (error === 'network') {
18142 // MEDIA_ERR_NETWORK
18143 this.tech_.error(2);
18144 } else if (error === 'decode') {
18145 // MEDIA_ERR_DECODE
18146 this.tech_.error(3);
18147 }
18148 if (this.readyState !== 'ended') {
18149 this.readyState = 'ended';
18150 this.swfObj.vjs_endOfStream();
18151 }
18152 }
18153 }]);
18154
18155 return FlashMediaSource;
18156})(_videoJs2['default'].EventTarget);
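// Example (sketch; assumes the 'sourceopen' handler above has run so tech_ is
// wired up): endOfStream maps MSE error names onto HTML media error codes:
//
//   mediaSource.endOfStream('network'); // tech_.error(2), MEDIA_ERR_NETWORK
//   mediaSource.endOfStream('decode'); // tech_.error(3), MEDIA_ERR_DECODE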
18157
18158exports['default'] = FlashMediaSource;
18159try {
18160 Object.defineProperty(FlashMediaSource.prototype, 'duration', {
18161 /**
18162 * Return the presentation duration.
18163 *
18164 * @return {Double} the duration of the media in seconds
18165 * @link http://www.w3.org/TR/media-source/#widl-MediaSource-duration
18166 */
18167 get: function get() {
18168 if (!this.swfObj) {
18169 return NaN;
18170 }
18171 // get the current duration from the SWF
18172 return this.swfObj.vjs_getProperty('duration');
18173 },
18174 /**
18175 * Set the presentation duration.
18176 *
18177 * @param {Double} value the duration of the media in seconds
18178 * @return {Double} the duration of the media in seconds
18179 * @link http://www.w3.org/TR/media-source/#widl-MediaSource-duration
18180 */
18181 set: function set(value) {
18182 var i = undefined;
18183 var oldDuration = this.swfObj.vjs_getProperty('duration');
18184
18185 this.swfObj.vjs_setProperty('duration', value);
18186
18187 if (value < oldDuration) {
18188 // In MSE, this triggers the range removal algorithm which causes
18189 // an update to occur
18190 for (i = 0; i < this.sourceBuffers.length; i++) {
18191 this.sourceBuffers[i].remove(value, oldDuration);
18192 }
18193 }
18194
18195 return value;
18196 }
18197 });
18198} catch (e) {
18199 // IE8 throws if defineProperty is called on a non-DOM node. We
18200 // don't support IE8 but we shouldn't throw an error if loaded
18201 // there.
18202 FlashMediaSource.prototype.duration = NaN;
18203}
18204
18205for (var property in _flashConstants2['default']) {
18206 FlashMediaSource[property] = _flashConstants2['default'][property];
18207}
18208module.exports = exports['default'];
18209}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
18210},{"./codec-utils":65,"./flash-constants":67,"./flash-source-buffer":69,"global/document":31}],69:[function(require,module,exports){
18211(function (global){
18212/**
18213 * @file flash-source-buffer.js
18214 */
18215'use strict';
18216
18217Object.defineProperty(exports, '__esModule', {
18218 value: true
18219});
18220
18221var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
18222
18223var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
18224
18225function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
18226
18227function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
18228
18229function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
18230
18231var _globalWindow = require('global/window');
18232
18233var _globalWindow2 = _interopRequireDefault(_globalWindow);
18234
18235var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
18236
18237var _videoJs2 = _interopRequireDefault(_videoJs);
18238
18239var _muxJsLibFlv = require('mux.js/lib/flv');
18240
18241var _muxJsLibFlv2 = _interopRequireDefault(_muxJsLibFlv);
18242
18243var _removeCuesFromTrack = require('./remove-cues-from-track');
18244
18245var _removeCuesFromTrack2 = _interopRequireDefault(_removeCuesFromTrack);
18246
18247var _createTextTracksIfNecessary = require('./create-text-tracks-if-necessary');
18248
18249var _createTextTracksIfNecessary2 = _interopRequireDefault(_createTextTracksIfNecessary);
18250
18251var _addTextTrackData = require('./add-text-track-data');
18252
18253var _flashTransmuxerWorker = require('./flash-transmuxer-worker');
18254
18255var _flashTransmuxerWorker2 = _interopRequireDefault(_flashTransmuxerWorker);
18256
18257var _webworkify = require('webworkify');
18258
18259var _webworkify2 = _interopRequireDefault(_webworkify);
18260
18261var _flashConstants = require('./flash-constants');
18262
18263var _flashConstants2 = _interopRequireDefault(_flashConstants);
18264
18265/**
18266 * A wrapper around the setTimeout function that uses
18267 * the flash constant time between ticks value.
18268 *
18269 * @param {Function} func the function callback to run
18270 * @private
18271 */
18272var scheduleTick = function scheduleTick(func) {
18273 // Chrome doesn't invoke requestAnimationFrame callbacks
18274 // in background tabs, so use setTimeout.
18275 _globalWindow2['default'].setTimeout(func, _flashConstants2['default'].TIME_BETWEEN_CHUNKS);
18276};
18277
18278/**
18279 * Generates a random string of max length 6
18280 *
18281 * @return {String} the randomly generated string
18282 * @function generateRandomString
18283 * @private
18284 */
18285var generateRandomString = function generateRandomString() {
18286 return Math.random().toString(36).slice(2, 8);
18287};
18288
18289/**
18290 * Round a number to a specified number of places much like
18291 * toFixed but return a number instead of a string representation.
18292 *
18293 * @param {Number} num A number
18294 * @param {Number} places The number of decimal places which to
18295 * round
18296 * @private
18297 */
18298var toDecimalPlaces = function toDecimalPlaces(num, places) {
18299 if (typeof places !== 'number' || places < 0) {
18300 places = 0;
18301 }
18302
18303 var scale = Math.pow(10, places);
18304
18305 return Math.round(num * scale) / scale;
18306};
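// Examples: toDecimalPlaces(3.14159, 3) === 3.142 (a Number, unlike toFixed),
// and a missing or negative `places` falls back to 0:
// toDecimalPlaces(3.7) === 4.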
18307
18308/**
18309 * A SourceBuffer implementation for Flash rather than HTML.
18310 *
18311 * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource
18312 * @param {Object} mediaSource the flash media source
18313 * @class FlashSourceBuffer
18314 * @extends videojs.EventTarget
18315 */
18316
18317var FlashSourceBuffer = (function (_videojs$EventTarget) {
18318 _inherits(FlashSourceBuffer, _videojs$EventTarget);
18319
18320 function FlashSourceBuffer(mediaSource) {
18321 var _this = this;
18322
18323 _classCallCheck(this, FlashSourceBuffer);
18324
18325 _get(Object.getPrototypeOf(FlashSourceBuffer.prototype), 'constructor', this).call(this);
18326 var encodedHeader = undefined;
18327
18328 // Start off using the globally defined value but refine
18329 // as we append data into flash
18330 this.chunkSize_ = _flashConstants2['default'].BYTES_PER_CHUNK;
18331
18332 // byte arrays queued to be appended
18333 this.buffer_ = [];
18334
18335 // the total number of queued bytes
18336 this.bufferSize_ = 0;
18337
18338 // to be able to determine the correct position to seek to, we
18339 // need to retain information about the mapping between the
18340 // media timeline and PTS values
18341 this.basePtsOffset_ = NaN;
18342
18343 this.mediaSource_ = mediaSource;
18344
18345 this.audioBufferEnd_ = NaN;
18346 this.videoBufferEnd_ = NaN;
18347
18348 // indicates whether the asynchronous continuation of an operation
18349 // is still being processed
18350 // see https://w3c.github.io/media-source/#widl-SourceBuffer-updating
18351 this.updating = false;
18352 this.timestampOffset_ = 0;
18353
18354 encodedHeader = _globalWindow2['default'].btoa(String.fromCharCode.apply(null, Array.prototype.slice.call(_muxJsLibFlv2['default'].getFlvHeader())));
18355
18356 // create function names with added randomness for the global callbacks flash will use
18357 // to get data from javascript into the swf. Random strings are added as a safety
18358 // measure for pages with multiple players since these functions will be global
18359 // instead of per instance. When making a call to the swf, the browser generates a
18360 // try catch code snippet, but just takes the function name and writes out an unquoted
18361 // call to that function. If the player id has any special characters, this will result
18362  // in an error, so safePlayerId replaces all special characters with '_'
18363 var safePlayerId = this.mediaSource_.player_.id().replace(/[^a-zA-Z0-9]/g, '_');
18364
18365 this.flashEncodedHeaderName_ = 'vjs_flashEncodedHeader_' + safePlayerId + generateRandomString();
18366 this.flashEncodedDataName_ = 'vjs_flashEncodedData_' + safePlayerId + generateRandomString();
18367
18368 _globalWindow2['default'][this.flashEncodedHeaderName_] = function () {
18369 delete _globalWindow2['default'][_this.flashEncodedHeaderName_];
18370 return encodedHeader;
18371 };
18372
18373 this.mediaSource_.swfObj.vjs_appendChunkReady(this.flashEncodedHeaderName_);
18374
18375 this.transmuxer_ = (0, _webworkify2['default'])(_flashTransmuxerWorker2['default']);
18376 this.transmuxer_.postMessage({ action: 'init', options: {} });
18377 this.transmuxer_.onmessage = function (event) {
18378 if (event.data.action === 'data') {
18379 _this.receiveBuffer_(event.data.segment);
18380 }
18381 };
18382
18383 this.one('updateend', function () {
18384 _this.mediaSource_.tech_.trigger('loadedmetadata');
18385 });
18386
18387 Object.defineProperty(this, 'timestampOffset', {
18388 get: function get() {
18389 return this.timestampOffset_;
18390 },
18391 set: function set(val) {
18392 if (typeof val === 'number' && val >= 0) {
18393 this.timestampOffset_ = val;
18394 // We have to tell flash to expect a discontinuity
18395 this.mediaSource_.swfObj.vjs_discontinuity();
18396 // the media <-> PTS mapping must be re-established after
18397 // the discontinuity
18398 this.basePtsOffset_ = NaN;
18399 this.audioBufferEnd_ = NaN;
18400 this.videoBufferEnd_ = NaN;
18401
18402 this.transmuxer_.postMessage({ action: 'reset' });
18403 }
18404 }
18405 });
18406
18407 Object.defineProperty(this, 'buffered', {
18408 get: function get() {
18409 if (!this.mediaSource_ || !this.mediaSource_.swfObj || !('vjs_getProperty' in this.mediaSource_.swfObj)) {
18410 return _videoJs2['default'].createTimeRange();
18411 }
18412
18413 var buffered = this.mediaSource_.swfObj.vjs_getProperty('buffered');
18414
18415 if (buffered && buffered.length) {
18416 buffered[0][0] = toDecimalPlaces(buffered[0][0], 3);
18417 buffered[0][1] = toDecimalPlaces(buffered[0][1], 3);
18418 }
18419 return _videoJs2['default'].createTimeRanges(buffered);
18420 }
18421 });
18422
18423 // On a seek we remove all text track data since flash has no concept
18424 // of a buffered-range and everything else is reset on seek
18425 this.mediaSource_.player_.on('seeked', function () {
18426 (0, _removeCuesFromTrack2['default'])(0, Infinity, _this.metadataTrack_);
18427 if (_this.inbandTextTracks_) {
18428 for (var track in _this.inbandTextTracks_) {
18429 (0, _removeCuesFromTrack2['default'])(0, Infinity, _this.inbandTextTracks_[track]);
18430 }
18431 }
18432 });
18433
18434 var onHlsReset = this.onHlsReset_.bind(this);
18435
18436 // hls-reset is fired by videojs.Hls on to the tech after the main SegmentLoader
18437 // resets its state and flushes the buffer
18438 this.mediaSource_.player_.tech_.on('hls-reset', onHlsReset);
18439
18440 this.mediaSource_.player_.tech_.hls.on('dispose', function () {
18441 _this.transmuxer_.terminate();
18442 _this.mediaSource_.player_.tech_.off('hls-reset', onHlsReset);
18443 });
18444 }
18445
18446 /**
18447 * Append bytes to the source buffer's internal buffer; in this case we
18448 * have to hand the bytes off to the swf object.
18449 *
18450 * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/appendBuffer
18451 * @param {Uint8Array} bytes
18452 */
18453
18454 _createClass(FlashSourceBuffer, [{
18455 key: 'appendBuffer',
18456 value: function appendBuffer(bytes) {
18457 var error = undefined;
18458
18459 if (this.updating) {
18460 error = new Error('SourceBuffer.append() cannot be called ' + 'while an update is in progress');
18461 error.name = 'InvalidStateError';
18462 error.code = 11;
18463 throw error;
18464 }
18465 this.updating = true;
18466 this.mediaSource_.readyState = 'open';
18467 this.trigger({ type: 'update' });
18468
18469 this.transmuxer_.postMessage({
18470 action: 'push',
18471 data: bytes.buffer,
18472 byteOffset: bytes.byteOffset,
18473 byteLength: bytes.byteLength
18474 }, [bytes.buffer]);
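      // the [bytes.buffer] argument above transfers ownership of the
      // ArrayBuffer to the worker instead of copying it; after the call the
      // buffer is detached on this side (bytes.buffer.byteLength === 0)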
18475 this.transmuxer_.postMessage({ action: 'flush' });
18476 }
18477
18478 /**
18479 * Reset the parser and remove any data queued to be sent to the SWF.
18480 *
18481 * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/abort
18482 */
18483 }, {
18484 key: 'abort',
18485 value: function abort() {
18486 this.buffer_ = [];
18487 this.bufferSize_ = 0;
18488 this.mediaSource_.swfObj.vjs_abort();
18489
18490 // report any outstanding updates have ended
18491 if (this.updating) {
18492 this.updating = false;
18493 this.trigger({ type: 'updateend' });
18494 }
18495 }
18496
18497 /**
18498 * Flash cannot remove ranges already buffered in the NetStream
18499 * but seeking clears the buffer entirely. For most purposes,
18500 * having this operation act as a no-op is acceptable.
18501 *
18502 * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/remove
18503 * @param {Double} start start of the section to remove
18504 * @param {Double} end end of the section to remove
18505 */
18506 }, {
18507 key: 'remove',
18508 value: function remove(start, end) {
18509 (0, _removeCuesFromTrack2['default'])(start, end, this.metadataTrack_);
18510 if (this.inbandTextTracks_) {
18511 for (var track in this.inbandTextTracks_) {
18512 (0, _removeCuesFromTrack2['default'])(start, end, this.inbandTextTracks_[track]);
18513 }
18514 }
18515 this.trigger({ type: 'update' });
18516 this.trigger({ type: 'updateend' });
18517 }
18518
18519 /**
18520 * Receive a transmuxed segment of flv tags from the transmuxer worker.
18521 *
18522 * @param {Object} segment
18523 * @private
18524 */
18525 }, {
18526 key: 'receiveBuffer_',
18527 value: function receiveBuffer_(segment) {
18528 var _this2 = this;
18529
18530 // create an in-band caption track if one is present in the segment
18531 (0, _createTextTracksIfNecessary2['default'])(this, this.mediaSource_, segment);
18532 (0, _addTextTrackData.addTextTrackData)(this, segment.captions, segment.metadata);
18533
18534 // Do this asynchronously since convertTagsToData_ can be time consuming
18535 scheduleTick(function () {
18536 var flvBytes = _this2.convertTagsToData_(segment);
18537
18538 if (_this2.buffer_.length === 0) {
18539 scheduleTick(_this2.processBuffer_.bind(_this2));
18540 }
18541
18542 if (flvBytes) {
18543 _this2.buffer_.push(flvBytes);
18544 _this2.bufferSize_ += flvBytes.byteLength;
18545 }
18546 });
18547 }
18548
18549 /**
18550 * Append a portion of the current buffer to the SWF.
18551 *
18552 * @private
18553 */
18554 }, {
18555 key: 'processBuffer_',
18556 value: function processBuffer_() {
18557 var _this3 = this;
18558
18559 var chunkSize = _flashConstants2['default'].BYTES_PER_CHUNK;
18560
18561 if (!this.buffer_.length) {
18562 if (this.updating !== false) {
18563 this.updating = false;
18564 this.trigger({ type: 'updateend' });
18565 }
18566 // do nothing if the buffer is empty
18567 return;
18568 }
18569
18570 // take a chunk of bytes, up to the max chunk size, from the front of the buffer
18571 var chunk = this.buffer_[0].subarray(0, chunkSize);
18572
18573 // requeue any bytes that won't make it this round
18574 if (chunk.byteLength < chunkSize || this.buffer_[0].byteLength === chunkSize) {
18575 this.buffer_.shift();
18576 } else {
18577 this.buffer_[0] = this.buffer_[0].subarray(chunkSize);
18578 }
18579
18580 this.bufferSize_ -= chunk.byteLength;
18581
18582 // base64 encode the bytes
18583 var binary = [];
18584 var length = chunk.byteLength;
18585
18586 for (var i = 0; i < length; i++) {
18587 binary.push(String.fromCharCode(chunk[i]));
18588 }
18589 var b64str = _globalWindow2['default'].btoa(binary.join(''));
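      // e.g. a chunk holding the bytes [104, 105] becomes the binary string
      // 'hi', which btoa encodes as 'aGk='; the swf side decodes this back
      // into raw bytes before appending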
18590
18591 _globalWindow2['default'][this.flashEncodedDataName_] = function () {
18592 // schedule another processBuffer to process any left over data or to
18593 // trigger updateend
18594 scheduleTick(_this3.processBuffer_.bind(_this3));
18595 delete _globalWindow2['default'][_this3.flashEncodedDataName_];
18596 return b64str;
18597 };
18598
18599 // Notify the swf that segment data is ready to be appended
18600 this.mediaSource_.swfObj.vjs_appendChunkReady(this.flashEncodedDataName_);
18601 }
18602
18603 /**
18604 * Turns an array of flv tags into a Uint8Array representing the
18605 * flv data. Also removes any tags that are before the current
18606 * time so that playback begins at or slightly after the right
18607 * place on a seek
18608 *
18609 * @private
18610 * @param {Object} segmentData object of segment data
18611 */
18612 }, {
18613 key: 'convertTagsToData_',
18614 value: function convertTagsToData_(segmentData) {
18615 var segmentByteLength = 0;
18616 var tech = this.mediaSource_.tech_;
18617 var videoTargetPts = 0;
18618 var segment = undefined;
18619 var videoTags = segmentData.tags.videoTags;
18620 var audioTags = segmentData.tags.audioTags;
18621
18622 // Establish the media timeline to PTS translation if we don't
18623 // have one already
18624 if (isNaN(this.basePtsOffset_) && (videoTags.length || audioTags.length)) {
18625 // We know there is at least one video or audio tag, but since we may not have both,
18626 // we use pts: Infinity for the missing tag. This forces the following Math.min
18627 // call to use the proper pts value, since any real pts will always be less than Infinity
18628 var firstVideoTag = videoTags[0] || { pts: Infinity };
18629 var firstAudioTag = audioTags[0] || { pts: Infinity };
18630
18631 this.basePtsOffset_ = Math.min(firstAudioTag.pts, firstVideoTag.pts);
18632 }
18633
18634 if (tech.seeking()) {
18635 // Do not use previously saved buffer end values while seeking since buffer
18636 // is cleared on all seeks
18637 this.videoBufferEnd_ = NaN;
18638 this.audioBufferEnd_ = NaN;
18639 }
18640
18641 if (isNaN(this.videoBufferEnd_)) {
18642 if (tech.buffered().length) {
18643 videoTargetPts = tech.buffered().end(0) - this.timestampOffset;
18644 }
18645
18646 // Trim to currentTime if seeking
18647 if (tech.seeking()) {
18648 videoTargetPts = Math.max(videoTargetPts, tech.currentTime() - this.timestampOffset);
18649 }
18650
18651 // PTS values are represented in milliseconds
18652 videoTargetPts *= 1e3;
18653 videoTargetPts += this.basePtsOffset_;
18654 } else {
18655 // Add a fudge factor of 0.1 to the last video pts appended since a rendition change
18656 // could append an overlapping segment, in which case there is a high likelihood
18657 // that a tag has a pts matching videoBufferEnd_, which would cause
18658 // that tag to pass the tag.pts >= targetPts check below even though it
18659 // is a duplicate of what was previously appended
18660 videoTargetPts = this.videoBufferEnd_ + 0.1;
18661 }
18662
18663 // filter complete GOPs with a presentation time less than the seek target/end of buffer
18664 var currentIndex = videoTags.length;
18665
18666 // only search the list for a GOP if the last tag reaches videoTargetPts; otherwise
18667 // videoTargetPts lies in a future segment and none of these tags should be appended
18668 if (currentIndex && videoTags[currentIndex - 1].pts >= videoTargetPts) {
18669 // Start by walking backwards from the end of the list until we reach a tag that
18670 // is equal to or less than videoTargetPts
18671 while (--currentIndex) {
18672 var currentTag = videoTags[currentIndex];
18673
18674 if (currentTag.pts > videoTargetPts) {
18675 continue;
18676 }
18677
18678 // if we see a keyFrame or metadata tag once we've gone below videoTargetPts,
18679 // exit the loop as this is the start of the GOP that we want to append
18680 if (currentTag.keyFrame || currentTag.metaDataTag) {
18681 break;
18682 }
18683 }
18684
18685 // We need to check if there are any metadata tags that come before currentIndex
18686 // as those will be metadata tags associated with the GOP we are appending.
18687 // There could be 0 to 2 metadata tags that come before the currentIndex depending
18688 // on what videoTargetPts is and whether the transmuxer prepended metadata tags to this
18689 // key frame
18690 while (currentIndex) {
18691 var nextTag = videoTags[currentIndex - 1];
18692
18693 if (!nextTag.metaDataTag) {
18694 break;
18695 }
18696
18697 currentIndex--;
18698 }
18699 }
18700
18701 var filteredVideoTags = videoTags.slice(currentIndex);
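      // e.g. given video tag pts of [0, 40, 80, 120, 160] where the tag at 80
      // is a key frame and videoTargetPts is 100, the walk above steps back
      // past 160 and 120, stops at the key frame at 80, and filteredVideoTags
      // starts at pts 80 so a complete GOP covering the target is appended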
18702
18703 var audioTargetPts = undefined;
18704
18705 if (isNaN(this.audioBufferEnd_)) {
18706 audioTargetPts = videoTargetPts;
18707 } else {
18708 // Add a fudge factor of 0.1 to the last audio pts appended since a rendition change
18709 // could append an overlapping segment, in which case there is a high likelihood
18710 // that a tag has a pts matching audioBufferEnd_, which would cause
18711 // that tag to pass the tag.pts >= targetPts check below even though it
18712 // is a duplicate of what was previously appended
18713 audioTargetPts = this.audioBufferEnd_ + 0.1;
18714 }
18715
18716 if (filteredVideoTags.length) {
18717 // If targetPts intersects a GOP and we appended the tags for the GOP that came
18718 // before targetPts, we want to make sure to trim audio tags at the pts
18719 // of the first video tag to avoid brief moments of silence
18720 audioTargetPts = Math.min(audioTargetPts, filteredVideoTags[0].pts);
18721 }
18722
18723 // skip tags with a presentation time less than the seek target/end of buffer
18724 currentIndex = 0;
18725
18726 while (currentIndex < audioTags.length) {
18727 if (audioTags[currentIndex].pts >= audioTargetPts) {
18728 break;
18729 }
18730
18731 currentIndex++;
18732 }
18733
18734 var filteredAudioTags = audioTags.slice(currentIndex);
18735
18736 // update the audio and video buffer ends
18737 if (filteredAudioTags.length) {
18738 this.audioBufferEnd_ = filteredAudioTags[filteredAudioTags.length - 1].pts;
18739 }
18740 if (filteredVideoTags.length) {
18741 this.videoBufferEnd_ = filteredVideoTags[filteredVideoTags.length - 1].pts;
18742 }
18743
18744 var tags = this.getOrderedTags_(filteredVideoTags, filteredAudioTags);
18745
18746 if (tags.length === 0) {
18747 return;
18748 }
18749
18750 // If we are appending data that comes before our target pts, we want to tell
18751 // the swf to adjust its notion of current time to account for the extra tags
18752 // we are appending to complete the GOP that intersects with targetPts
18753 if (tags[0].pts < videoTargetPts && tech.seeking()) {
18754 var fudgeFactor = 1 / 30;
18755 var currentTime = tech.currentTime();
18756 var diff = (videoTargetPts - tags[0].pts) / 1e3;
18757 var adjustedTime = currentTime - diff;
18758
18759 if (adjustedTime < fudgeFactor) {
18760 adjustedTime = 0;
18761 }
18762
18763 try {
18764 this.mediaSource_.swfObj.vjs_adjustCurrentTime(adjustedTime);
18765 } catch (e) {
18766 // no-op for backwards compatibility of the swf. If adjustCurrentTime fails,
18767 // the swf may incorrectly report currentTime and buffered ranges,
18768 // but this should not affect playback other than making the time displayed
18769 // on the progress bar inaccurate
18770 }
18771 }
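      // e.g. if tech.currentTime() is 10 and the first appended tag sits 500ms
      // (0.5s) before videoTargetPts, diff is 0.5 and the swf clock is pulled
      // back to 9.5 so the GOP-completing tags line up with the seek target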
18772
18773 // concatenate the bytes into a single segment
18774 for (var i = 0; i < tags.length; i++) {
18775 segmentByteLength += tags[i].bytes.byteLength;
18776 }
18777 segment = new Uint8Array(segmentByteLength);
18778 for (var i = 0, j = 0; i < tags.length; i++) {
18779 segment.set(tags[i].bytes, j);
18780 j += tags[i].bytes.byteLength;
18781 }
18782
18783 return segment;
18784 }
18785
18786 /**
18787 * Assemble the FLV tags in decoder order.
18788 *
18789 * @private
18790 * @param {Array} videoTags list of video tags
18791 * @param {Array} audioTags list of audio tags
18792 */
18793 }, {
18794 key: 'getOrderedTags_',
18795 value: function getOrderedTags_(videoTags, audioTags) {
18796 var tag = undefined;
18797 var tags = [];
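      // e.g. video tags with dts [0, 66] and audio tags with dts [23, 46]
      // interleave to [video 0, audio 23, audio 46, video 66], preserving
      // decode order across both streams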
18798
18799 while (videoTags.length || audioTags.length) {
18800 if (!videoTags.length) {
18801 // only audio tags remain
18802 tag = audioTags.shift();
18803 } else if (!audioTags.length) {
18804 // only video tags remain
18805 tag = videoTags.shift();
18806 } else if (audioTags[0].dts < videoTags[0].dts) {
18807 // audio should be decoded next
18808 tag = audioTags.shift();
18809 } else {
18810 // video should be decoded next
18811 tag = videoTags.shift();
18812 }
18813
18814 tags.push(tag);
18815 }
18816
18817 return tags;
18818 }
18819 }, {
18820 key: 'onHlsReset_',
18821 value: function onHlsReset_() {
18822 this.transmuxer_.postMessage({ action: 'resetCaptions' });
18823 }
18824 }]);
18825
18826 return FlashSourceBuffer;
18827})(_videoJs2['default'].EventTarget);
18828
18829exports['default'] = FlashSourceBuffer;
18830module.exports = exports['default'];
18831}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
18832},{"./add-text-track-data":64,"./create-text-tracks-if-necessary":66,"./flash-constants":67,"./flash-transmuxer-worker":70,"./remove-cues-from-track":72,"global/window":32,"mux.js/lib/flv":46,"webworkify":76}],70:[function(require,module,exports){
18833/**
18834 * @file flash-transmuxer-worker.js
18835 */
18836'use strict';
18837
18838Object.defineProperty(exports, '__esModule', {
18839 value: true
18840});
18841
18842var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
18843
18844function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
18845
18846function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
18847
18848var _globalWindow = require('global/window');
18849
18850var _globalWindow2 = _interopRequireDefault(_globalWindow);
18851
18852var _muxJsLibFlv = require('mux.js/lib/flv');
18853
18854var _muxJsLibFlv2 = _interopRequireDefault(_muxJsLibFlv);
18855
18856/**
18857 * Re-emits transmuxer events by converting them into messages to the
18858 * world outside the worker.
18859 *
18860 * @param {Object} transmuxer the transmuxer to wire events on
18861 * @private
18862 */
18863var wireTransmuxerEvents = function wireTransmuxerEvents(transmuxer) {
18864 transmuxer.on('data', function (segment) {
18865 _globalWindow2['default'].postMessage({
18866 action: 'data',
18867 segment: segment
18868 });
18869 });
18870
18871 transmuxer.on('done', function (data) {
18872 _globalWindow2['default'].postMessage({ action: 'done' });
18873 });
18874};
18875
18876/**
18877 * All incoming messages route through this hash. If no function exists
18878 * to handle an incoming message, then we ignore the message.
18879 *
18880 * @class MessageHandlers
18881 * @param {Object} options the options to initialize with
18882 */
18883
18884var MessageHandlers = (function () {
18885 function MessageHandlers(options) {
18886 _classCallCheck(this, MessageHandlers);
18887
18888 this.options = options || {};
18889 this.init();
18890 }
18891
18892 /**
18893 * Our web worker interface so that things can talk to mux.js
18894 * that will be running in a web worker. The scope is passed to this by
18895 * webworkify.
18896 *
18897 * @param {Object} self the scope for the web worker
18898 */
18899
18900 /**
18901 * initialize our web worker and wire all the events.
18902 */
18903
18904 _createClass(MessageHandlers, [{
18905 key: 'init',
18906 value: function init() {
18907 if (this.transmuxer) {
18908 this.transmuxer.dispose();
18909 }
18910 this.transmuxer = new _muxJsLibFlv2['default'].Transmuxer(this.options);
18911 wireTransmuxerEvents(this.transmuxer);
18912 }
18913
18914 /**
18915 * Adds data (a ts segment) to the start of the transmuxer pipeline for
18916 * processing.
18917 *
18918 * @param {ArrayBuffer} data data to push into the muxer
18919 */
18920 }, {
18921 key: 'push',
18922 value: function push(data) {
18923 // Cast array buffer to correct type for transmuxer
18924 var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
18925
18926 this.transmuxer.push(segment);
18927 }
18928
18929 /**
18930 * Recreate the transmuxer so that the next segment added via `push`
18931 * starts with a fresh transmuxer.
18932 */
18933 }, {
18934 key: 'reset',
18935 value: function reset() {
18936 this.init();
18937 }
18938
18939 /**
18940 * Forces the pipeline to finish processing the last segment and emit its
18941 * results.
18942 */
18943 }, {
18944 key: 'flush',
18945 value: function flush() {
18946 this.transmuxer.flush();
18947 }
18948 }, {
18949 key: 'resetCaptions',
18950 value: function resetCaptions() {
18951 this.transmuxer.resetCaptions();
18952 }
18953 }]);
18954
18955 return MessageHandlers;
18956})();
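// Illustrative message protocol (hypothetical main-thread caller, mirroring
// what FlashSourceBuffer actually posts to this worker):
//
//   worker.postMessage({ action: 'init', options: {} });
//   worker.postMessage({
//     action: 'push',
//     data: bytes.buffer,
//     byteOffset: bytes.byteOffset,
//     byteLength: bytes.byteLength
//   }, [bytes.buffer]);
//   worker.postMessage({ action: 'flush' });
//
// The worker replies with { action: 'data', segment: ... } messages followed
// by { action: 'done' }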
18957
18958var FlashTransmuxerWorker = function FlashTransmuxerWorker(self) {
18959 self.onmessage = function (event) {
18960 if (event.data.action === 'init' && event.data.options) {
18961 this.messageHandlers = new MessageHandlers(event.data.options);
18962 return;
18963 }
18964
18965 if (!this.messageHandlers) {
18966 this.messageHandlers = new MessageHandlers();
18967 }
18968
18969 if (event.data && event.data.action && event.data.action !== 'init') {
18970 if (this.messageHandlers[event.data.action]) {
18971 this.messageHandlers[event.data.action](event.data);
18972 }
18973 }
18974 };
18975};
18976
18977exports['default'] = function (self) {
18978 return new FlashTransmuxerWorker(self);
18979};
18980
18981module.exports = exports['default'];
18982},{"global/window":32,"mux.js/lib/flv":46}],71:[function(require,module,exports){
18983(function (global){
18984/**
18985 * @file html-media-source.js
18986 */
18987'use strict';
18988
18989Object.defineProperty(exports, '__esModule', {
18990 value: true
18991});
18992
18993var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
18994
18995var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
18996
18997function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
18998
18999function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
19000
19001function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
19002
19003var _globalWindow = require('global/window');
19004
19005var _globalWindow2 = _interopRequireDefault(_globalWindow);
19006
19007var _globalDocument = require('global/document');
19008
19009var _globalDocument2 = _interopRequireDefault(_globalDocument);
19010
19011var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
19012
19013var _videoJs2 = _interopRequireDefault(_videoJs);
19014
19015var _virtualSourceBuffer = require('./virtual-source-buffer');
19016
19017var _virtualSourceBuffer2 = _interopRequireDefault(_virtualSourceBuffer);
19018
19019var _addTextTrackData = require('./add-text-track-data');
19020
19021var _codecUtils = require('./codec-utils');
19022
19023/**
19024 * Our MediaSource implementation in HTML, mimics native
19025 * MediaSource where/if possible.
19026 *
19027 * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource
19028 * @class HtmlMediaSource
19029 * @extends videojs.EventTarget
19030 */
19031
19032var HtmlMediaSource = (function (_videojs$EventTarget) {
19033 _inherits(HtmlMediaSource, _videojs$EventTarget);
19034
19035 function HtmlMediaSource() {
19036 var _this = this;
19037
19038 _classCallCheck(this, HtmlMediaSource);
19039
19040 _get(Object.getPrototypeOf(HtmlMediaSource.prototype), 'constructor', this).call(this);
19041 var property = undefined;
19042
19043 this.nativeMediaSource_ = new _globalWindow2['default'].MediaSource();
19044 // delegate to the native MediaSource's methods by default
19045 for (property in this.nativeMediaSource_) {
19046 if (!(property in HtmlMediaSource.prototype) && typeof this.nativeMediaSource_[property] === 'function') {
19047 this[property] = this.nativeMediaSource_[property].bind(this.nativeMediaSource_);
19048 }
19049 }
19050
19051 // emulate `duration` and `seekable` until seeking can be
19052 // handled uniformly for live streams
19053 // see https://github.com/w3c/media-source/issues/5
19054 this.duration_ = NaN;
19055 Object.defineProperty(this, 'duration', {
19056 get: function get() {
19057 if (this.duration_ === Infinity) {
19058 return this.duration_;
19059 }
19060 return this.nativeMediaSource_.duration;
19061 },
19062 set: function set(duration) {
19063 this.duration_ = duration;
19064 if (duration !== Infinity) {
19065 this.nativeMediaSource_.duration = duration;
19066 return;
19067 }
19068 }
19069 });
19070 Object.defineProperty(this, 'seekable', {
19071 get: function get() {
19072 if (this.duration_ === Infinity) {
19073 return _videoJs2['default'].createTimeRanges([[0, this.nativeMediaSource_.duration]]);
19074 }
19075 return this.nativeMediaSource_.seekable;
19076 }
19077 });
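    // e.g. for a live stream the emulated duration reports Infinity while the
    // native duration is advanced piecemeal through addSeekableRange_, which
    // lets the seekable getter above report [0, nativeDuration] as the live
    // window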
19078
19079 Object.defineProperty(this, 'readyState', {
19080 get: function get() {
19081 return this.nativeMediaSource_.readyState;
19082 }
19083 });
19084
19085 Object.defineProperty(this, 'activeSourceBuffers', {
19086 get: function get() {
19087 return this.activeSourceBuffers_;
19088 }
19089 });
19090
19091 // the list of virtual and native SourceBuffers created by this
19092 // MediaSource
19093 this.sourceBuffers = [];
19094
19095 this.activeSourceBuffers_ = [];
19096
19097 /**
19098 * update the list of active source buffers based upon various
19099 * information from HLS and video.js
19100 *
19101 * @private
19102 */
19103 this.updateActiveSourceBuffers_ = function () {
19104 // Retain the reference but empty the array
19105 _this.activeSourceBuffers_.length = 0;
19106
19107 // If there is only one source buffer, then it will always be active and audio will
19108 // be disabled based on the codec of the source buffer
19109 if (_this.sourceBuffers.length === 1) {
19110 var sourceBuffer = _this.sourceBuffers[0];
19111
19112 sourceBuffer.appendAudioInitSegment_ = true;
19113 sourceBuffer.audioDisabled_ = !sourceBuffer.audioCodec_;
19114 _this.activeSourceBuffers_.push(sourceBuffer);
19115 return;
19116 }
19117
19118 // There are 2 source buffers, a combined (possibly video only) source buffer and
19119 // an audio only source buffer.
19120 // By default, the audio in the combined virtual source buffer is enabled
19121 // and the audio-only source buffer (if it exists) is disabled.
19122 var disableCombined = false;
19123 var disableAudioOnly = true;
19124
19125 // TODO: maybe we can store the sourcebuffers on the track objects?
19126 // safari may do something like this
19127 for (var i = 0; i < _this.player_.audioTracks().length; i++) {
19128 var track = _this.player_.audioTracks()[i];
19129
19130 if (track.enabled && track.kind !== 'main') {
19131 // The enabled track is an alternate audio track so disable the audio in
19132 // the combined source buffer and enable the audio-only source buffer.
19133 disableCombined = true;
19134 disableAudioOnly = false;
19135 break;
19136 }
19137 }
19138
19139 _this.sourceBuffers.forEach(function (sourceBuffer) {
19140 /* eslint-disable */
19141 // TODO once codecs are required, we can switch to using the codecs to determine
19142 // what stream is the video stream, rather than relying on videoTracks
19143 /* eslint-enable */
19144
19145 sourceBuffer.appendAudioInitSegment_ = true;
19146
19147 if (sourceBuffer.videoCodec_ && sourceBuffer.audioCodec_) {
19148 // combined
19149 sourceBuffer.audioDisabled_ = disableCombined;
19150 } else if (sourceBuffer.videoCodec_ && !sourceBuffer.audioCodec_) {
19151 // If the "combined" source buffer is video only, then we do not want
19152 // to disable the audio-only source buffer (this is mostly for demuxed
19153 // audio and video hls)
19154 sourceBuffer.audioDisabled_ = true;
19155 disableAudioOnly = false;
19156 } else if (!sourceBuffer.videoCodec_ && sourceBuffer.audioCodec_) {
19157 // audio only
19158 sourceBuffer.audioDisabled_ = disableAudioOnly;
19159 if (disableAudioOnly) {
19160 return;
19161 }
19162 }
19163
19164 _this.activeSourceBuffers_.push(sourceBuffer);
19165 });
19166 };
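    // the two-buffer case above reduces to a small decision table:
    // - main audio enabled: combined buffer keeps its audio, audio-only disabled
    // - alternate audio enabled: combined audio disabled, audio-only enabled
    // - combined buffer is video only: its audioDisabled_ is forced true and
    //   the audio-only buffer is always kept active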
19167
19168 this.onPlayerMediachange_ = function () {
19169 _this.sourceBuffers.forEach(function (sourceBuffer) {
19170 sourceBuffer.appendAudioInitSegment_ = true;
19171 });
19172 };
19173
19174 this.onHlsReset_ = function () {
19175 _this.sourceBuffers.forEach(function (sourceBuffer) {
19176 if (sourceBuffer.transmuxer_) {
19177 sourceBuffer.transmuxer_.postMessage({ action: 'resetCaptions' });
19178 }
19179 });
19180 };
19181
19182 this.onHlsSegmentTimeMapping_ = function (event) {
19183 _this.sourceBuffers.forEach(function (buffer) {
19184 return buffer.timeMapping_ = event.mapping;
19185 });
19186 };
19187
19188 // Re-emit MediaSource events on the polyfill
19189 ['sourceopen', 'sourceclose', 'sourceended'].forEach(function (eventName) {
19190 this.nativeMediaSource_.addEventListener(eventName, this.trigger.bind(this));
19191 }, this);
19192
19193 // capture the associated player when the MediaSource is
19194 // successfully attached
19195 this.on('sourceopen', function (event) {
19196 // Get the player this MediaSource is attached to
19197 var video = _globalDocument2['default'].querySelector('[src="' + _this.url_ + '"]');
19198
19199 if (!video) {
19200 return;
19201 }
19202
19203 _this.player_ = (0, _videoJs2['default'])(video.parentNode);
19204
19205 // hls-reset is fired by videojs.Hls onto the tech after the main SegmentLoader
19206 // resets its state and flushes the buffer
19207 _this.player_.tech_.on('hls-reset', _this.onHlsReset_);
19208 // hls-segment-time-mapping is fired by videojs.Hls onto the tech after the main
19209 // SegmentLoader inspects an MTS segment and has an accurate stream-to-display
19210 // time mapping
19211 _this.player_.tech_.on('hls-segment-time-mapping', _this.onHlsSegmentTimeMapping_);
19212
19213 if (_this.player_.audioTracks && _this.player_.audioTracks()) {
19214 _this.player_.audioTracks().on('change', _this.updateActiveSourceBuffers_);
19215 _this.player_.audioTracks().on('addtrack', _this.updateActiveSourceBuffers_);
19216 _this.player_.audioTracks().on('removetrack', _this.updateActiveSourceBuffers_);
19217 }
19218
19219 _this.player_.on('mediachange', _this.onPlayerMediachange_);
19220 });
19221
19222 this.on('sourceended', function (event) {
19223 var duration = (0, _addTextTrackData.durationOfVideo)(_this.duration);
19224
19225 for (var i = 0; i < _this.sourceBuffers.length; i++) {
19226 var sourcebuffer = _this.sourceBuffers[i];
19227 var cues = sourcebuffer.metadataTrack_ && sourcebuffer.metadataTrack_.cues;
19228
19229 if (cues && cues.length) {
19230 cues[cues.length - 1].endTime = duration;
19231 }
19232 }
19233 });
19234
19235 // explicitly terminate any WebWorkers that were created
19236 // by SourceHandlers
19237 this.on('sourceclose', function (event) {
19238 this.sourceBuffers.forEach(function (sourceBuffer) {
19239 if (sourceBuffer.transmuxer_) {
19240 sourceBuffer.transmuxer_.terminate();
19241 }
19242 });
19243
19244 this.sourceBuffers.length = 0;
19245 if (!this.player_) {
19246 return;
19247 }
19248
19249 if (this.player_.audioTracks && this.player_.audioTracks()) {
19250 this.player_.audioTracks().off('change', this.updateActiveSourceBuffers_);
19251 this.player_.audioTracks().off('addtrack', this.updateActiveSourceBuffers_);
19252 this.player_.audioTracks().off('removetrack', this.updateActiveSourceBuffers_);
19253 }
19254
19255 // We can only change this if the player hasn't been disposed of yet
19256 // because `off` eventually tries to use the el_ property. If it has
19257 // been disposed of, then don't worry about it because there are no
19258 // event handlers left to unbind anyway
19259 if (this.player_.el_) {
19260 this.player_.off('mediachange', this.onPlayerMediachange_);
19261 this.player_.tech_.off('hls-reset', this.onHlsReset_);
19262 this.player_.tech_.off('hls-segment-time-mapping', this.onHlsSegmentTimeMapping_);
19263 }
19264 });
19265 }
19266
19267 /**
19268 * Add a range that can now be seeked to.
19269 *
19270 * @param {Double} start where to start the addition
19271 * @param {Double} end where to end the addition
19272 * @private
19273 */
19274
19275 _createClass(HtmlMediaSource, [{
19276 key: 'addSeekableRange_',
19277 value: function addSeekableRange_(start, end) {
19278 var error = undefined;
19279
19280 if (this.duration !== Infinity) {
19281 error = new Error('MediaSource.addSeekableRange() can only be invoked ' + 'when the duration is Infinity');
19282 error.name = 'InvalidStateError';
19283 error.code = 11;
19284 throw error;
19285 }
19286
19287 if (end > this.nativeMediaSource_.duration || isNaN(this.nativeMediaSource_.duration)) {
19288 this.nativeMediaSource_.duration = end;
19289 }
19290 }
19291
19292 /**
19293 * Add a source buffer to the media source.
19294 *
19295 * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/addSourceBuffer
19296 * @param {String} type the content-type of the content
19297 * @return {Object} the created source buffer
19298 */
19299 }, {
19300 key: 'addSourceBuffer',
19301 value: function addSourceBuffer(type) {
19302 var buffer = undefined;
19303 var parsedType = (0, _codecUtils.parseContentType)(type);
19304
19305 // Create a VirtualSourceBuffer to transmux MPEG-2 transport
19306 // stream segments into fragmented MP4s
19307 if (/^(video|audio)\/mp2t$/i.test(parsedType.type)) {
19308 var codecs = [];
19309
19310 if (parsedType.parameters && parsedType.parameters.codecs) {
19311 codecs = parsedType.parameters.codecs.split(',');
19312 codecs = (0, _codecUtils.translateLegacyCodecs)(codecs);
19313 codecs = codecs.filter(function (codec) {
19314 return (0, _codecUtils.isAudioCodec)(codec) || (0, _codecUtils.isVideoCodec)(codec);
19315 });
19316 }
19317
19318 if (codecs.length === 0) {
19319 codecs = ['avc1.4d400d', 'mp4a.40.2'];
19320 }
19321
19322 buffer = new _virtualSourceBuffer2['default'](this, codecs);
19323
19324 if (this.sourceBuffers.length !== 0) {
19325 // If another VirtualSourceBuffer already exists, then we are creating a
19326 // SourceBuffer for an alternate audio track and therefore we know that
19327 // the source has both an audio and video track.
19328 // That means we should trigger the manual creation of the real
19329 // SourceBuffers instead of waiting for the transmuxer to return data
19330 this.sourceBuffers[0].createRealSourceBuffers_();
19331 buffer.createRealSourceBuffers_();
19332
19333 // Automatically disable the audio on the first source buffer if
19334 // a second source buffer is ever created
19335 this.sourceBuffers[0].audioDisabled_ = true;
19336 }
19337 } else {
19338 // delegate to the native implementation
19339 buffer = this.nativeMediaSource_.addSourceBuffer(type);
19340 }
19341
19342 this.sourceBuffers.push(buffer);
19343 return buffer;
19344 }
19345 }]);
19346
19347 return HtmlMediaSource;
19348})(_videoJs2['default'].EventTarget);
19349
19350exports['default'] = HtmlMediaSource;
19351module.exports = exports['default'];
19352}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
19353},{"./add-text-track-data":64,"./codec-utils":65,"./virtual-source-buffer":75,"global/document":31,"global/window":32}],72:[function(require,module,exports){
19354/**
19355 * @file remove-cues-from-track.js
19356 */
19357
19358/**
19359 * Remove cues from a track on video.js.
19360 *
19361 * @param {Double} start start of where we should remove the cue
19362 * @param {Double} end end of where we should remove the cue
19363 * @param {Object} track the text track to remove the cues from
19364 * @private
19365 */
19366"use strict";
19367
19368Object.defineProperty(exports, "__esModule", {
19369 value: true
19370});
19371var removeCuesFromTrack = function removeCuesFromTrack(start, end, track) {
19372 var i = undefined;
19373 var cue = undefined;
19374
19375 if (!track) {
19376 return;
19377 }
19378
19379 if (!track.cues) {
19380 return;
19381 }
19382
19383 i = track.cues.length;
19384
19385 while (i--) {
19386 cue = track.cues[i];
19387
19388 // Remove any overlapping cue
19389 if (cue.startTime <= end && cue.endTime >= start) {
19390 track.removeCue(cue);
19391 }
19392 }
19393};
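// Illustrative sketch (not used by this module): the test above keeps a cue
// for removal when its [startTime, endTime] interval intersects [start, end].
// Isolated as a pure helper, e.g. rangesOverlap(5, 10, 8, 12) === true and
// rangesOverlap(5, 10, 11, 12) === false.
var rangesOverlap = function rangesOverlap(startA, endA, startB, endB) {
  // two intervals intersect exactly when each one starts before the other ends
  return startA <= endB && endA >= startB;
};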
19394
19395exports["default"] = removeCuesFromTrack;
19396module.exports = exports["default"];
19397},{}],73:[function(require,module,exports){
19398/**
19399 * @file transmuxer-worker.js
19400 */
19401
19402/**
19403 * videojs-contrib-media-sources
19404 *
19405 * Copyright (c) 2015 Brightcove
19406 * All rights reserved.
19407 *
19408 * Handles communication between the browser-world and the mux.js
19409 * transmuxer running inside of a WebWorker by exposing a simple
19410 * message-based interface to a Transmuxer object.
19411 */
19412'use strict';
19413
19414Object.defineProperty(exports, '__esModule', {
19415 value: true
19416});
19417
19418var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
19419
19420function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
19421
19422function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
19423
19424var _globalWindow = require('global/window');
19425
19426var _globalWindow2 = _interopRequireDefault(_globalWindow);
19427
19428var _muxJsLibMp4 = require('mux.js/lib/mp4');
19429
19430var _muxJsLibMp42 = _interopRequireDefault(_muxJsLibMp4);
19431
19432/**
19433 * Re-emits transmuxer events by converting them into messages to the
19434 * world outside the worker.
19435 *
19436 * @param {Object} transmuxer the transmuxer to wire events on
19437 * @private
19438 */
19439var wireTransmuxerEvents = function wireTransmuxerEvents(transmuxer) {
19440 transmuxer.on('data', function (segment) {
19441 // transfer ownership of the underlying ArrayBuffer
19442 // instead of doing a copy to save memory
19443 // ArrayBuffers are transferable but generic TypedArrays are not
19444 // @link https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Using_web_workers#Passing_data_by_transferring_ownership_(transferable_objects)
19445 var initArray = segment.initSegment;
19446
19447 segment.initSegment = {
19448 data: initArray.buffer,
19449 byteOffset: initArray.byteOffset,
19450 byteLength: initArray.byteLength
19451 };
19452
19453 var typedArray = segment.data;
19454
19455 segment.data = typedArray.buffer;
19456 _globalWindow2['default'].postMessage({
19457 action: 'data',
19458 segment: segment,
19459 byteOffset: typedArray.byteOffset,
19460 byteLength: typedArray.byteLength
19461 }, [segment.data]);
19462 });
19463
19464 if (transmuxer.captionStream) {
19465 transmuxer.captionStream.on('data', function (caption) {
19466 _globalWindow2['default'].postMessage({
19467 action: 'caption',
19468 data: caption
19469 });
19470 });
19471 }
19472
19473 transmuxer.on('done', function (data) {
19474 _globalWindow2['default'].postMessage({ action: 'done' });
19475 });
19476
19477 transmuxer.on('gopInfo', function (gopInfo) {
19478 _globalWindow2['default'].postMessage({
19479 action: 'gopInfo',
19480 gopInfo: gopInfo
19481 });
19482 });
19483};
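// Note the transferable handoff above: passing [segment.data] as the second
// postMessage argument moves the ArrayBuffer to the main thread rather than
// copying it, detaching it inside the worker. The receiver can rebuild the
// view with new Uint8Array(data, byteOffset, byteLength), mirroring what the
// push handler below does for the opposite direction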
19484
19485/**
19486 * All incoming messages route through this hash. If no function exists
19487 * to handle an incoming message, then we ignore the message.
19488 *
19489 * @class MessageHandlers
19490 * @param {Object} options the options to initialize with
19491 */
19492
19493var MessageHandlers = (function () {
19494 function MessageHandlers(options) {
19495 _classCallCheck(this, MessageHandlers);
19496
19497 this.options = options || {};
19498 this.init();
19499 }
19500
19501 /**
19502 * Our web worker interface so that things can talk to mux.js
19503 * that will be running in a web worker. The scope is passed to this by
19504 * webworkify.
19505 *
19506 * @param {Object} self the scope for the web worker
19507 */
19508
19509 /**
19510 * initialize our web worker and wire all the events.
19511 */
19512
19513 _createClass(MessageHandlers, [{
19514 key: 'init',
19515 value: function init() {
19516 if (this.transmuxer) {
19517 this.transmuxer.dispose();
19518 }
19519 this.transmuxer = new _muxJsLibMp42['default'].Transmuxer(this.options);
19520 wireTransmuxerEvents(this.transmuxer);
19521 }
19522
19523 /**
19524 * Adds data (a ts segment) to the start of the transmuxer pipeline for
19525 * processing.
19526 *
19527 * @param {ArrayBuffer} data data to push into the muxer
19528 */
19529 }, {
19530 key: 'push',
19531 value: function push(data) {
19532 // Cast array buffer to correct type for transmuxer
19533 var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
19534
19535 this.transmuxer.push(segment);
19536 }
19537
19538 /**
19539 * Recreate the transmuxer so that the next segment added via `push`
19540 * starts with a fresh transmuxer.
19541 */
19542 }, {
19543 key: 'reset',
19544 value: function reset() {
19545 this.init();
19546 }
19547
19548 /**
19549 * Set the value that will be used as the `baseMediaDecodeTime` time for the
19550 * next segment pushed in. Subsequent segments will have their `baseMediaDecodeTime`
19551 * set relative to the first based on the PTS values.
19552 *
19553 * @param {Object} data used to set the timestamp offset in the muxer
19554 */
19555 }, {
19556 key: 'setTimestampOffset',
19557 value: function setTimestampOffset(data) {
19558 var timestampOffset = data.timestampOffset || 0;
19559
19560 this.transmuxer.setBaseMediaDecodeTime(Math.round(timestampOffset * 90000));
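      // e.g. a timestampOffset of 10 seconds becomes
      // Math.round(10 * 90000) === 900000 ticks of the MPEG-TS 90kHz clock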
19561 }
19562 }, {
19563 key: 'setAudioAppendStart',
19564 value: function setAudioAppendStart(data) {
19565 this.transmuxer.setAudioAppendStart(Math.ceil(data.appendStart * 90000));
19566 }
19567
19568 /**
19569 * Forces the pipeline to finish processing the last segment and emit its
19570 * results.
19571 *
19572 * @param {Object} data event data, not really used
19573 */
19574 }, {
19575 key: 'flush',
19576 value: function flush(data) {
19577 this.transmuxer.flush();
19578 }
19579 }, {
19580 key: 'resetCaptions',
19581 value: function resetCaptions() {
19582 this.transmuxer.resetCaptions();
19583 }
19584 }, {
19585 key: 'alignGopsWith',
19586 value: function alignGopsWith(data) {
19587 this.transmuxer.alignGopsWith(data.gopsToAlignWith.slice());
19588 }
19589 }]);
19590
19591 return MessageHandlers;
19592})();
19593
19594var TransmuxerWorker = function TransmuxerWorker(self) {
19595 self.onmessage = function (event) {
19596 if (event.data.action === 'init' && event.data.options) {
19597 this.messageHandlers = new MessageHandlers(event.data.options);
19598 return;
19599 }
19600
19601 if (!this.messageHandlers) {
19602 this.messageHandlers = new MessageHandlers();
19603 }
19604
19605 if (event.data && event.data.action && event.data.action !== 'init') {
19606 if (this.messageHandlers[event.data.action]) {
19607 this.messageHandlers[event.data.action](event.data);
19608 }
19609 }
19610 };
19611};
19612
19613exports['default'] = function (self) {
19614 return new TransmuxerWorker(self);
19615};
19616
19617module.exports = exports['default'];
19618},{"global/window":32,"mux.js/lib/mp4":55}],74:[function(require,module,exports){
19619(function (global){
19620/**
19621 * @file videojs-contrib-media-sources.js
19622 */
19623'use strict';
19624
19625Object.defineProperty(exports, '__esModule', {
19626 value: true
19627});
19628
19629function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
19630
19631var _globalWindow = require('global/window');
19632
19633var _globalWindow2 = _interopRequireDefault(_globalWindow);
19634
19635var _flashMediaSource = require('./flash-media-source');
19636
19637var _flashMediaSource2 = _interopRequireDefault(_flashMediaSource);
19638
19639var _htmlMediaSource = require('./html-media-source');
19640
19641var _htmlMediaSource2 = _interopRequireDefault(_htmlMediaSource);
19642
19643var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
19644
19645var _videoJs2 = _interopRequireDefault(_videoJs);
19646
19647var urlCount = 0;
19648
19649// ------------
19650// Media Source
19651// ------------
19652
19653var defaults = {
19654 // how to determine the MediaSource implementation to use. There
19655 // are three available modes:
19656 // - auto: use native MediaSources where available and Flash
19657 // everywhere else
19658 // - html5: always use native MediaSources
19659 // - flash: always use the Flash MediaSource polyfill
19660 mode: 'auto'
19661};
19662
19663// store references to the media sources so they can be connected
19664// to a video element (a swf object)
19665// TODO: can we store this somewhere local to this module?
19666_videoJs2['default'].mediaSources = {};
19667
19668/**
19669 * Provide a method for a swf object to notify JS that a
19670 * media source is now open.
19671 *
19672 * @param {String} msObjectURL string referencing the MSE Object URL
19673 * @param {String} swfId the swf id
19674 */
19675var open = function open(msObjectURL, swfId) {
19676 var mediaSource = _videoJs2['default'].mediaSources[msObjectURL];
19677
19678 if (mediaSource) {
19679 mediaSource.trigger({ type: 'sourceopen', swfId: swfId });
19680 } else {
19681 throw new Error('Media Source not found (Video.js)');
19682 }
19683};
19684
19685/**
19686 * Check to see if the native MediaSource object exists and supports
19687 * an MP4 container with both H.264 video and AAC-LC audio.
19688 *
19689 * @return {Boolean} if native media sources are supported
19690 */
19691var supportsNativeMediaSources = function supportsNativeMediaSources() {
19692 return !!_globalWindow2['default'].MediaSource && !!_globalWindow2['default'].MediaSource.isTypeSupported && _globalWindow2['default'].MediaSource.isTypeSupported('video/mp4;codecs="avc1.4d400d,mp4a.40.2"');
19693};
19694
19695/**
19696 * An emulation of the MediaSource API so that we can support
19697 * native and non-native functionality such as flash and
19698 * video/mp2t videos. Returns an instance of HtmlMediaSource or
19699 * FlashMediaSource depending on what is supported and what options
19700 * are passed in.
19701 *
19702 * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/MediaSource
19703 * @param {Object} options options to use during setup.
19704 */
19705var MediaSource = function MediaSource(options) {
19706 var settings = _videoJs2['default'].mergeOptions(defaults, options);
19707
19708 this.MediaSource = {
19709 open: open,
19710 supportsNativeMediaSources: supportsNativeMediaSources
19711 };
19712
19713 // determine whether HTML MediaSources should be used
19714 if (settings.mode === 'html5' || settings.mode === 'auto' && supportsNativeMediaSources()) {
19715 return new _htmlMediaSource2['default']();
19716 } else if (_videoJs2['default'].getTech('Flash')) {
19717 return new _flashMediaSource2['default']();
19718 }
19719
19720 throw new Error('Cannot use Flash or Html5 to create a MediaSource for this video');
19721};
19722
19723exports.MediaSource = MediaSource;
19724MediaSource.open = open;
19725MediaSource.supportsNativeMediaSources = supportsNativeMediaSources;
19726
19727/**
19728 * A wrapper around the native URL for our MSE object
19729 * implementation, this object is exposed under videojs.URL
19730 *
19731 * @link https://developer.mozilla.org/en-US/docs/Web/API/URL/URL
19732 */
19733var URL = {
19734 /**
19735 * A wrapper around the native createObjectURL for our objects.
19736 * This function maps a native or emulated mediaSource to a blob
19737 * url so that it can be loaded into video.js
19738 *
19739 * @link https://developer.mozilla.org/en-US/docs/Web/API/URL/createObjectURL
19740 * @param {MediaSource} object the object to create a blob url to
19741 */
19742 createObjectURL: function createObjectURL(object) {
19743 var objectUrlPrefix = 'blob:vjs-media-source/';
19744 var url = undefined;
19745
19746 // use the native MediaSource to generate an object URL
19747 if (object instanceof _htmlMediaSource2['default']) {
19748 url = _globalWindow2['default'].URL.createObjectURL(object.nativeMediaSource_);
19749 object.url_ = url;
19750 return url;
19751 }
19752 // if the object isn't an emulated MediaSource, delegate to the
19753 // native implementation
19754 if (!(object instanceof _flashMediaSource2['default'])) {
19755 url = _globalWindow2['default'].URL.createObjectURL(object);
19756 object.url_ = url;
19757 return url;
19758 }
19759
19760 // build a URL that can be used to map back to the emulated
19761 // MediaSource
19762 url = objectUrlPrefix + urlCount;
19763
19764 urlCount++;
19765
19766 // setup the mapping back to object
19767 _videoJs2['default'].mediaSources[url] = object;
19768
19769 return url;
19770 }
19771};
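// Illustrative usage sketch (hypothetical, not executed here): wiring an
// emulated MediaSource to a player through a blob URL; `player` stands in for
// a video.js player instance.
//
//   var mediaSource = new videojs.MediaSource({ mode: 'auto' });
//   var url = videojs.URL.createObjectURL(mediaSource);
//
//   mediaSource.on('sourceopen', function() {
//     mediaSource.addSourceBuffer('video/mp2t; codecs="avc1.4d400d, mp4a.40.2"');
//   });
//   player.src({ src: url, type: 'video/mp2t' });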
19772
19773exports.URL = URL;
19774_videoJs2['default'].MediaSource = MediaSource;
19775_videoJs2['default'].URL = URL;
19776}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
19777},{"./flash-media-source":68,"./html-media-source":71,"global/window":32}],75:[function(require,module,exports){
19778(function (global){
19779/**
19780 * @file virtual-source-buffer.js
19781 */
19782'use strict';
19783
19784Object.defineProperty(exports, '__esModule', {
19785 value: true
19786});
19787
19788var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
19789
19790var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
19791
19792function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
19793
19794function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
19795
19796function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
19797
19798var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
19799
19800var _videoJs2 = _interopRequireDefault(_videoJs);
19801
19802var _createTextTracksIfNecessary = require('./create-text-tracks-if-necessary');
19803
19804var _createTextTracksIfNecessary2 = _interopRequireDefault(_createTextTracksIfNecessary);
19805
19806var _removeCuesFromTrack = require('./remove-cues-from-track');
19807
19808var _removeCuesFromTrack2 = _interopRequireDefault(_removeCuesFromTrack);
19809
19810var _addTextTrackData = require('./add-text-track-data');
19811
19812var _webworkify = require('webworkify');
19813
19814var _webworkify2 = _interopRequireDefault(_webworkify);
19815
19816var _transmuxerWorker = require('./transmuxer-worker');
19817
19818var _transmuxerWorker2 = _interopRequireDefault(_transmuxerWorker);
19819
19820var _codecUtils = require('./codec-utils');
19821
19822// We create a wrapper around the SourceBuffer so that we can manage the
19823// state of the `updating` property manually. We have to do this because
19824// Firefox changes `updating` to false long before triggering `updateend`
19825// events and that was causing strange problems in videojs-contrib-hls
19826var makeWrappedSourceBuffer = function makeWrappedSourceBuffer(mediaSource, mimeType) {
19827 var sourceBuffer = mediaSource.addSourceBuffer(mimeType);
19828 var wrapper = Object.create(null);
19829
19830 wrapper.updating = false;
19831 wrapper.realBuffer_ = sourceBuffer;
19832
19833 var _loop = function (key) {
19834 if (typeof sourceBuffer[key] === 'function') {
19835 wrapper[key] = function () {
19836 return sourceBuffer[key].apply(sourceBuffer, arguments);
19837 };
19838 } else if (typeof wrapper[key] === 'undefined') {
19839 Object.defineProperty(wrapper, key, {
19840 get: function get() {
19841 return sourceBuffer[key];
19842 },
19843 set: function set(v) {
19844 return sourceBuffer[key] = v;
19845 }
19846 });
19847 }
19848 };
19849
19850 for (var key in sourceBuffer) {
19851 _loop(key);
19852 }
19853
19854 return wrapper;
19855};
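// e.g. a wrapper created above delegates every native method and property to
// the real SourceBuffer, but because `updating` was assigned on the wrapper
// before the delegation loop ran, the loop's typeof-undefined guard skips it
// and the manually managed flag wins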
19856
19857/**
19858 * Returns a list of gops in the buffer that have a pts value of 3 seconds or more in
19859 * front of current time.
19860 *
19861 * @param {Array} buffer
19862 * The current buffer of gop information
19863 * @param {Player} player
19864 * The player instance
19865 * @param {Double} mapping
19866 * Offset to map display time to stream presentation time
19867 * @return {Array}
19868 * List of gops considered safe to append over
19869 */
19870var gopsSafeToAlignWith = function gopsSafeToAlignWith(buffer, player, mapping) {
19871 if (!player || !buffer.length) {
19872 return [];
19873 }
19874
19875 // pts value for current time + 3 seconds to give a bit more wiggle room
19876 var currentTimePts = Math.ceil((player.currentTime() - mapping + 3) * 90000);
19877
19878 var i = undefined;
19879
19880 for (i = 0; i < buffer.length; i++) {
19881 if (buffer[i].pts > currentTimePts) {
19882 break;
19883 }
19884 }
19885
19886 return buffer.slice(i);
19887};
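// Illustrative sketch (not used by this module): the lookahead cutoff computed
// above, isolated as a pure helper. With currentTime = 10 and mapping = 0 it
// returns Math.ceil((10 - 0 + 3) * 90000) === 1170000, so only gops more than
// three seconds ahead of playback survive the slice.
var exampleLookaheadPts = function exampleLookaheadPts(currentTime, mapping) {
  // display seconds -> 90kHz presentation clock, plus three seconds of slack
  return Math.ceil((currentTime - mapping + 3) * 90000);
};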
19888
19889exports.gopsSafeToAlignWith = gopsSafeToAlignWith;
19890/**
19891 * Appends gop information (timing and byteLength) received by the transmuxer for the
19892 * gops appended in the last call to appendBuffer
19893 *
19894 * @param {Array} buffer
19895 * The current buffer of gop information
19896 * @param {Array} gops
19897 * List of new gop information
19898 * @param {boolean} replace
19899 * If true, replace the buffer with the new gop information. If false, append the
19900 * new gop information into the buffer at the correct position in time.
19901 * @return {Array}
19902 * Updated list of gop information
19903 */
19904var updateGopBuffer = function updateGopBuffer(buffer, gops, replace) {
19905 if (!gops.length) {
19906 return buffer;
19907 }
19908
19909 if (replace) {
19910 // If we are in safe append mode, then completely overwrite the gop buffer
19911 // with the most recent appended data. This will make sure that when appending
19912 // future segments, we only try to align with gops that are both ahead of current
19913 // time and in the last segment appended.
19914 return gops.slice();
19915 }
19916
19917 var start = gops[0].pts;
19918
19919 var i = 0;
19920
19921 for (i; i < buffer.length; i++) {
19922 if (buffer[i].pts >= start) {
19923 break;
19924 }
19925 }
19926
19927 return buffer.slice(0, i).concat(gops);
19928};
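// Worked example (illustration only): with buffered gop pts of
// [0, 100, 200, 300] and new gops at pts [150, 250], replace === false finds
// the first buffered gop at or after pts 150 (index 2), so the result is
// [0, 100].concat([150, 250]) and everything from pts 150 onward is replaced
// by the newly appended gop information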
19929
19930exports.updateGopBuffer = updateGopBuffer;
19931/**
19932 * Removes gop information in buffer that overlaps with provided start and end
19933 *
19934 * @param {Array} buffer
19935 * The current buffer of gop information
19936 * @param {Double} start
19937 * position to start the remove at
19938 * @param {Double} end
19939 * position to end the remove at
19940 * @param {Double} mapping
19941 * Offset to map display time to stream presentation time
19942 */
19943var removeGopBuffer = function removeGopBuffer(buffer, start, end, mapping) {
19944 var startPts = Math.ceil((start - mapping) * 90000);
19945 var endPts = Math.ceil((end - mapping) * 90000);
19946 var updatedBuffer = buffer.slice();
19947
19948 var i = buffer.length;
19949
19950 while (i--) {
19951 if (buffer[i].pts <= endPts) {
19952 break;
19953 }
19954 }
19955
19956 if (i === -1) {
19957 // no removal because end of remove range is before start of buffer
19958 return updatedBuffer;
19959 }
19960
19961 var j = i + 1;
19962
19963 while (j--) {
19964 if (buffer[j].pts <= startPts) {
19965 break;
19966 }
19967 }
19968
19969 // clamp remove range start to 0 index
19970 j = Math.max(j, 0);
19971
19972 updatedBuffer.splice(j, i - j + 1);
19973
19974 return updatedBuffer;
19975};
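// Worked example (illustration only): with mapping = 0 and buffered gops at
// pts [0, 90000, 180000, 270000] (0-3 seconds), removing start = 0.5 and
// end = 1.5 gives startPts = 45000 and endPts = 135000. The first loop stops
// at the last gop starting at or before endPts (index 1) and the second at
// the last gop starting at or before startPts (index 0), so indices 0-1 are
// spliced out and the gops at 2s and 3s remain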
19976
19977exports.removeGopBuffer = removeGopBuffer;
19978/**
19979 * VirtualSourceBuffers exist so that we can transmux non native formats
19980 * into a native format, but keep the same api as a native source buffer.
19981 * It creates a transmuxer, that works in its own thread (a web worker) and
19982 * that transmuxer muxes the data into a native format. VirtualSourceBuffer will
19983 * then send all of that data to the native source buffer so that it is
19984 * indistinguishable from a natively supported format.
19985 *
19986 * @param {HtmlMediaSource} mediaSource the parent mediaSource
19987 * @param {Array} codecs array of codecs that we will be dealing with
19988 * @class VirtualSourceBuffer
19989 * @extends video.js.EventTarget
19990 */
19991
19992var VirtualSourceBuffer = (function (_videojs$EventTarget) {
19993 _inherits(VirtualSourceBuffer, _videojs$EventTarget);
19994
19995 function VirtualSourceBuffer(mediaSource, codecs) {
19996 var _this = this;
19997
19998 _classCallCheck(this, VirtualSourceBuffer);
19999
20000 _get(Object.getPrototypeOf(VirtualSourceBuffer.prototype), 'constructor', this).call(this, _videoJs2['default'].EventTarget);
20001 this.timestampOffset_ = 0;
20002 this.pendingBuffers_ = [];
20003 this.bufferUpdating_ = false;
20004
20005 this.mediaSource_ = mediaSource;
20006 this.codecs_ = codecs;
20007 this.audioCodec_ = null;
20008 this.videoCodec_ = null;
20009 this.audioDisabled_ = false;
20010 this.appendAudioInitSegment_ = true;
20011 this.gopBuffer_ = [];
20012 this.timeMapping_ = 0;
20013 this.safeAppend_ = _videoJs2['default'].browser.IE_VERSION >= 11;
20014
20015 var options = {
20016 remux: false,
20017 alignGopsAtEnd: this.safeAppend_
20018 };
20019
20020 this.codecs_.forEach(function (codec) {
20021 if ((0, _codecUtils.isAudioCodec)(codec)) {
20022 _this.audioCodec_ = codec;
20023 } else if ((0, _codecUtils.isVideoCodec)(codec)) {
20024 _this.videoCodec_ = codec;
20025 }
20026 });
20027
20028 // append muxed segments to their respective native buffers as
20029 // soon as they are available
20030 this.transmuxer_ = (0, _webworkify2['default'])(_transmuxerWorker2['default']);
20031 this.transmuxer_.postMessage({ action: 'init', options: options });
20032
20033 this.transmuxer_.onmessage = function (event) {
20034 if (event.data.action === 'data') {
20035 return _this.data_(event);
20036 }
20037
20038 if (event.data.action === 'done') {
20039 return _this.done_(event);
20040 }
20041
20042 if (event.data.action === 'gopInfo') {
20043 return _this.appendGopInfo_(event);
20044 }
20045 };
20046
20047 // this timestampOffset is a property with the side-effect of resetting
20048 // baseMediaDecodeTime in the transmuxer on the setter
20049 Object.defineProperty(this, 'timestampOffset', {
20050 get: function get() {
20051 return this.timestampOffset_;
20052 },
20053 set: function set(val) {
20054 if (typeof val === 'number' && val >= 0) {
20055 this.timestampOffset_ = val;
20056 this.appendAudioInitSegment_ = true;
20057
20058 // reset gop buffer on timestampoffset as this signals a change in timeline
20059 this.gopBuffer_.length = 0;
20060 this.timeMapping_ = 0;
20061
20062 // We have to tell the transmuxer to set the baseMediaDecodeTime to
20063 // the desired timestampOffset for the next segment
20064 this.transmuxer_.postMessage({
20065 action: 'setTimestampOffset',
20066 timestampOffset: val
20067 });
20068 }
20069 }
20070 });
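
    // Illustrative usage (hypothetical `vsb` and `segmentBytes` names, not part
    // of the library): a segment loader that wants the next appended segment to
    // start at 30 seconds of display time sets the offset before appending,
    // which also clears the gop buffer and rebases the transmuxer:
    //
    //   vsb.timestampOffset = 30;
    //   vsb.appendBuffer(segmentBytes);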
20071
20072 // setting the append window affects both source buffers
20073 Object.defineProperty(this, 'appendWindowStart', {
20074 get: function get() {
20075 return (this.videoBuffer_ || this.audioBuffer_).appendWindowStart;
20076 },
20077 set: function set(start) {
20078 if (this.videoBuffer_) {
20079 this.videoBuffer_.appendWindowStart = start;
20080 }
20081 if (this.audioBuffer_) {
20082 this.audioBuffer_.appendWindowStart = start;
20083 }
20084 }
20085 });
20086
20087 // this buffer is "updating" if either of its native buffers are
20088 Object.defineProperty(this, 'updating', {
20089 get: function get() {
20090 return !!(this.bufferUpdating_ || !this.audioDisabled_ && this.audioBuffer_ && this.audioBuffer_.updating || this.videoBuffer_ && this.videoBuffer_.updating);
20091 }
20092 });
20093
20094 // the buffered property is the intersection of the buffered
20095 // ranges of the native source buffers
20096 Object.defineProperty(this, 'buffered', {
20097 get: function get() {
20098 var start = null;
20099 var end = null;
20100 var arity = 0;
20101 var extents = [];
20102 var ranges = [];
20103
20104 // neither buffer has been created yet
20105 if (!this.videoBuffer_ && !this.audioBuffer_) {
20106 return _videoJs2['default'].createTimeRange();
20107 }
20108
20109 // only one buffer is configured
20110 if (!this.videoBuffer_) {
20111 return this.audioBuffer_.buffered;
20112 }
20113 if (!this.audioBuffer_) {
20114 return this.videoBuffer_.buffered;
20115 }
20116
20117 // both buffers are configured
20118 if (this.audioDisabled_) {
20119 return this.videoBuffer_.buffered;
20120 }
20121
20122 // both buffers are empty
20123 if (this.videoBuffer_.buffered.length === 0 && this.audioBuffer_.buffered.length === 0) {
20124 return _videoJs2['default'].createTimeRange();
20125 }
20126
20127 // Handle the case where we have both buffers and create an
20128 // intersection of the two
20129 var videoBuffered = this.videoBuffer_.buffered;
20130 var audioBuffered = this.audioBuffer_.buffered;
20131 var count = videoBuffered.length;
20132
20133 // A) Gather up all start and end times
20134 while (count--) {
20135 extents.push({ time: videoBuffered.start(count), type: 'start' });
20136 extents.push({ time: videoBuffered.end(count), type: 'end' });
20137 }
20138 count = audioBuffered.length;
20139 while (count--) {
20140 extents.push({ time: audioBuffered.start(count), type: 'start' });
20141 extents.push({ time: audioBuffered.end(count), type: 'end' });
20142 }
20143 // B) Sort them by time
20144 extents.sort(function (a, b) {
20145 return a.time - b.time;
20146 });
20147
20148 // C) Go along one by one incrementing arity for start and decrementing
20149 // arity for ends
20150 for (count = 0; count < extents.length; count++) {
20151 if (extents[count].type === 'start') {
20152 arity++;
20153
20154 // D) If arity is ever incremented to 2 we are entering an
20155 // overlapping range
20156 if (arity === 2) {
20157 start = extents[count].time;
20158 }
20159 } else if (extents[count].type === 'end') {
20160 arity--;
20161
20162          // E) If arity is ever decremented to 1 we are leaving an
20163 // overlapping range
20164 if (arity === 1) {
20165 end = extents[count].time;
20166 }
20167 }
20168
20169 // F) Record overlapping ranges
20170 if (start !== null && end !== null) {
20171 ranges.push([start, end]);
20172 start = null;
20173 end = null;
20174 }
20175 }
20176
20177 return _videoJs2['default'].createTimeRanges(ranges);
20178 }
20179 });
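
    // Worked example (illustrative): with video buffered over [0, 10] and audio
    // over [2, 12], the sorted extents are start@0, start@2, end@10, end@12;
    // arity reaches 2 at time 2 (entering the overlap) and falls back to 1 at
    // time 10 (leaving it), so `buffered` reports the single range [2, 10].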
20180 }
20181
20182 /**
20183 * When we get a data event from the transmuxer
20184 * we call this function and handle the data that
20185 * was sent to us
20186 *
20187 * @private
20188 * @param {Event} event the data event from the transmuxer
20189 */
20190
20191 _createClass(VirtualSourceBuffer, [{
20192 key: 'data_',
20193 value: function data_(event) {
20194 var segment = event.data.segment;
20195
20196 // Cast ArrayBuffer to TypedArray
20197 segment.data = new Uint8Array(segment.data, event.data.byteOffset, event.data.byteLength);
20198
20199 segment.initSegment = new Uint8Array(segment.initSegment.data, segment.initSegment.byteOffset, segment.initSegment.byteLength);
20200
20201 (0, _createTextTracksIfNecessary2['default'])(this, this.mediaSource_, segment);
20202
20203 // Add the segments to the pendingBuffers array
20204 this.pendingBuffers_.push(segment);
20205 return;
20206 }
20207
20208 /**
20209 * When we get a done event from the transmuxer
20210 * we call this function and we process all
20211 * of the pending data that we have been saving in the
20212 * data_ function
20213 *
20214 * @private
20215 * @param {Event} event the done event from the transmuxer
20216 */
20217 }, {
20218 key: 'done_',
20219 value: function done_(event) {
20220 // Don't process and append data if the mediaSource is closed
20221 if (this.mediaSource_.readyState === 'closed') {
20222 this.pendingBuffers_.length = 0;
20223 return;
20224 }
20225
20226 // All buffers should have been flushed from the muxer
20227 // start processing anything we have received
20228 this.processPendingSegments_();
20229 return;
20230 }
20231
20232 /**
20233 * Create our internal native audio/video source buffers and add
20234 * event handlers to them with the following conditions:
20235 * 1. they do not already exist on the mediaSource
20236 * 2. this VSB has a codec for them
20237 *
20238 * @private
20239 */
20240 }, {
20241 key: 'createRealSourceBuffers_',
20242 value: function createRealSourceBuffers_() {
20243 var _this2 = this;
20244
20245 var types = ['audio', 'video'];
20246
20247 types.forEach(function (type) {
20248 // Don't create a SourceBuffer of this type if we don't have a
20249 // codec for it
20250 if (!_this2[type + 'Codec_']) {
20251 return;
20252 }
20253
20254 // Do nothing if a SourceBuffer of this type already exists
20255 if (_this2[type + 'Buffer_']) {
20256 return;
20257 }
20258
20259 var buffer = null;
20260
20261 // If the mediasource already has a SourceBuffer for the codec
20262 // use that
20263 if (_this2.mediaSource_[type + 'Buffer_']) {
20264 buffer = _this2.mediaSource_[type + 'Buffer_'];
20265 // In multiple audio track cases, the audio source buffer is disabled
20266 // on the main VirtualSourceBuffer by the HTMLMediaSource much earlier
20267 // than createRealSourceBuffers_ is called to create the second
20268 // VirtualSourceBuffer because that happens as a side-effect of
20269 // videojs-contrib-hls starting the audioSegmentLoader. As a result,
20270 // the audioBuffer is essentially "ownerless" and no one will toggle
20271 // the `updating` state back to false once the `updateend` event is received
20272 //
20273 // Setting `updating` to false manually will work around this
20274 // situation and allow work to continue
20275 buffer.updating = false;
20276 } else {
20277 var codecProperty = type + 'Codec_';
20278 var mimeType = type + '/mp4;codecs="' + _this2[codecProperty] + '"';
20279
20280 buffer = makeWrappedSourceBuffer(_this2.mediaSource_.nativeMediaSource_, mimeType);
20281
20282 _this2.mediaSource_[type + 'Buffer_'] = buffer;
20283 }
20284
20285 _this2[type + 'Buffer_'] = buffer;
20286
20287 // Wire up the events to the SourceBuffer
20288 ['update', 'updatestart', 'updateend'].forEach(function (event) {
20289 buffer.addEventListener(event, function () {
20290 // if audio is disabled
20291 if (type === 'audio' && _this2.audioDisabled_) {
20292 return;
20293 }
20294
20295 if (event === 'updateend') {
20296 _this2[type + 'Buffer_'].updating = false;
20297 }
20298
20299 var shouldTrigger = types.every(function (t) {
20300 // skip checking audio's updating status if audio
20301 // is not enabled
20302 if (t === 'audio' && _this2.audioDisabled_) {
20303 return true;
20304 }
20305            // if the other type is updating we don't trigger
20306 if (type !== t && _this2[t + 'Buffer_'] && _this2[t + 'Buffer_'].updating) {
20307 return false;
20308 }
20309 return true;
20310 });
20311
20312 if (shouldTrigger) {
20313 return _this2.trigger(event);
20314 }
20315 });
20316 });
20317 });
20318 }
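
    // Illustrative sketch: with codecs_ = ['avc1.4d400d', 'mp4a.40.2'] and no
    // pre-existing buffers on the media source, the native source buffers are
    // created with the mime types 'video/mp4;codecs="avc1.4d400d"' and
    // 'audio/mp4;codecs="mp4a.40.2"' (remux: false keeps them separate).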
20319
20320 /**
20321 * Emulate the native mediasource function, but our function will
20322 * send all of the proposed segments to the transmuxer so that we
20323 * can transmux them before we append them to our internal
20324 * native source buffers in the correct format.
20325 *
20326 * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/appendBuffer
20327 * @param {Uint8Array} segment the segment to append to the buffer
20328 */
20329 }, {
20330 key: 'appendBuffer',
20331 value: function appendBuffer(segment) {
20332 // Start the internal "updating" state
20333 this.bufferUpdating_ = true;
20334
20335 if (this.audioBuffer_ && this.audioBuffer_.buffered.length) {
20336 var audioBuffered = this.audioBuffer_.buffered;
20337
20338 this.transmuxer_.postMessage({
20339 action: 'setAudioAppendStart',
20340 appendStart: audioBuffered.end(audioBuffered.length - 1)
20341 });
20342 }
20343
20344 if (this.videoBuffer_) {
20345 this.transmuxer_.postMessage({
20346 action: 'alignGopsWith',
20347 gopsToAlignWith: gopsSafeToAlignWith(this.gopBuffer_, this.mediaSource_.player_, this.timeMapping_)
20348 });
20349 }
20350
20351 this.transmuxer_.postMessage({
20352 action: 'push',
20353 // Send the typed-array of data as an ArrayBuffer so that
20354 // it can be sent as a "Transferable" and avoid the costly
20355 // memory copy
20356 data: segment.buffer,
20357
20358 // To recreate the original typed-array, we need information
20359 // about what portion of the ArrayBuffer it was a view into
20360 byteOffset: segment.byteOffset,
20361 byteLength: segment.byteLength
20362 }, [segment.buffer]);
20363 this.transmuxer_.postMessage({ action: 'flush' });
20364 }
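
    // Note (illustrative): listing [segment.buffer] as the transfer list moves
    // ownership of the ArrayBuffer to the worker, so the typed array is
    // detached on this side afterwards. For example:
    //
    //   var bytes = new Uint8Array(16);
    //   worker.postMessage({ data: bytes.buffer }, [bytes.buffer]);
    //   bytes.byteLength; // => 0, the buffer now lives in the worker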
20365
20366 /**
20367 * Appends gop information (timing and byteLength) received by the transmuxer for the
20368 * gops appended in the last call to appendBuffer
20369 *
20370 * @param {Event} event
20371 * The gopInfo event from the transmuxer
20372 * @param {Array} event.data.gopInfo
20373 * List of gop info to append
20374 */
20375 }, {
20376 key: 'appendGopInfo_',
20377 value: function appendGopInfo_(event) {
20378 this.gopBuffer_ = updateGopBuffer(this.gopBuffer_, event.data.gopInfo, this.safeAppend_);
20379 }
20380
20381 /**
20382 * Emulate the native mediasource function and remove parts
20383 * of the buffer from any of our internal buffers that exist
20384 *
20385 * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/remove
20386 * @param {Double} start position to start the remove at
20387 * @param {Double} end position to end the remove at
20388 */
20389 }, {
20390 key: 'remove',
20391 value: function remove(start, end) {
20392 if (this.videoBuffer_) {
20393 this.videoBuffer_.updating = true;
20394 this.videoBuffer_.remove(start, end);
20395 this.gopBuffer_ = removeGopBuffer(this.gopBuffer_, start, end, this.timeMapping_);
20396 }
20397 if (!this.audioDisabled_ && this.audioBuffer_) {
20398 this.audioBuffer_.updating = true;
20399 this.audioBuffer_.remove(start, end);
20400 }
20401
20402 // Remove Metadata Cues (id3)
20403 (0, _removeCuesFromTrack2['default'])(start, end, this.metadataTrack_);
20404
20405 // Remove Any Captions
20406 if (this.inbandTextTracks_) {
20407 for (var track in this.inbandTextTracks_) {
20408 (0, _removeCuesFromTrack2['default'])(start, end, this.inbandTextTracks_[track]);
20409 }
20410 }
20411 }
20412
20413 /**
20414 * Process any segments that the muxer has output
20415 * Concatenate segments together based on type and append them into
20416 * their respective sourceBuffers
20417 *
20418 * @private
20419 */
20420 }, {
20421 key: 'processPendingSegments_',
20422 value: function processPendingSegments_() {
20423 var sortedSegments = {
20424 video: {
20425 segments: [],
20426 bytes: 0
20427 },
20428 audio: {
20429 segments: [],
20430 bytes: 0
20431 },
20432 captions: [],
20433 metadata: []
20434 };
20435
20436 // Sort segments into separate video/audio arrays and
20437 // keep track of their total byte lengths
20438 sortedSegments = this.pendingBuffers_.reduce(function (segmentObj, segment) {
20439 var type = segment.type;
20440 var data = segment.data;
20441 var initSegment = segment.initSegment;
20442
20443 segmentObj[type].segments.push(data);
20444 segmentObj[type].bytes += data.byteLength;
20445
20446 segmentObj[type].initSegment = initSegment;
20447
20448 // Gather any captions into a single array
20449 if (segment.captions) {
20450 segmentObj.captions = segmentObj.captions.concat(segment.captions);
20451 }
20452
20453 if (segment.info) {
20454 segmentObj[type].info = segment.info;
20455 }
20456
20457 // Gather any metadata into a single array
20458 if (segment.metadata) {
20459 segmentObj.metadata = segmentObj.metadata.concat(segment.metadata);
20460 }
20461
20462 return segmentObj;
20463 }, sortedSegments);
20464
20465 // Create the real source buffers if they don't exist by now since we
20466 // finally are sure what tracks are contained in the source
20467 if (!this.videoBuffer_ && !this.audioBuffer_) {
20468 // Remove any codecs that may have been specified by default but
20469 // are no longer applicable now
20470 if (sortedSegments.video.bytes === 0) {
20471 this.videoCodec_ = null;
20472 }
20473 if (sortedSegments.audio.bytes === 0) {
20474 this.audioCodec_ = null;
20475 }
20476
20477 this.createRealSourceBuffers_();
20478 }
20479
20480 if (sortedSegments.audio.info) {
20481 this.mediaSource_.trigger({ type: 'audioinfo', info: sortedSegments.audio.info });
20482 }
20483 if (sortedSegments.video.info) {
20484 this.mediaSource_.trigger({ type: 'videoinfo', info: sortedSegments.video.info });
20485 }
20486
20487 if (this.appendAudioInitSegment_) {
20488 if (!this.audioDisabled_ && this.audioBuffer_) {
20489 sortedSegments.audio.segments.unshift(sortedSegments.audio.initSegment);
20490 sortedSegments.audio.bytes += sortedSegments.audio.initSegment.byteLength;
20491 }
20492 this.appendAudioInitSegment_ = false;
20493 }
20494
20495 var triggerUpdateend = false;
20496
20497 // Merge multiple video and audio segments into one and append
20498 if (this.videoBuffer_ && sortedSegments.video.bytes) {
20499 sortedSegments.video.segments.unshift(sortedSegments.video.initSegment);
20500 sortedSegments.video.bytes += sortedSegments.video.initSegment.byteLength;
20501 this.concatAndAppendSegments_(sortedSegments.video, this.videoBuffer_);
20502 // TODO: are video tracks the only ones with text tracks?
20503 (0, _addTextTrackData.addTextTrackData)(this, sortedSegments.captions, sortedSegments.metadata);
20504 } else if (this.videoBuffer_ && (this.audioDisabled_ || !this.audioBuffer_)) {
20505 // The transmuxer did not return any bytes of video, meaning it was all trimmed
20506 // for gop alignment. Since we have a video buffer and audio is disabled, updateend
20507 // will never be triggered by this source buffer, which will cause contrib-hls
20508 // to be stuck forever waiting for updateend. If audio is not disabled, updateend
20509 // will be triggered by the audio buffer, which will be sent upwards since the video
20510 // buffer will not be in an updating state.
20511 triggerUpdateend = true;
20512 }
20513
20514 if (!this.audioDisabled_ && this.audioBuffer_) {
20515 this.concatAndAppendSegments_(sortedSegments.audio, this.audioBuffer_);
20516 }
20517
20518 this.pendingBuffers_.length = 0;
20519
20520 if (triggerUpdateend) {
20521 this.trigger('updateend');
20522 }
20523
20524 // We are no longer in the internal "updating" state
20525 this.bufferUpdating_ = false;
20526 }
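
    // Illustrative sketch: the video init segment is prepended on every append,
    // while the audio init segment is only prepended on the first append or
    // after a timestampOffset change (appendAudioInitSegment_). For example,
    // video segments of 4 and 8 bytes plus a 2-byte init segment reduce to a
    // single 14-byte append via concatAndAppendSegments_.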
20527
20528 /**
20529 * Combine all segments into a single Uint8Array and then append them
20530 * to the destination buffer
20531 *
20532 * @param {Object} segmentObj
20533 * @param {SourceBuffer} destinationBuffer native source buffer to append data to
20534 * @private
20535 */
20536 }, {
20537 key: 'concatAndAppendSegments_',
20538 value: function concatAndAppendSegments_(segmentObj, destinationBuffer) {
20539 var offset = 0;
20540 var tempBuffer = undefined;
20541
20542 if (segmentObj.bytes) {
20543 tempBuffer = new Uint8Array(segmentObj.bytes);
20544
20545 // Combine the individual segments into one large typed-array
20546 segmentObj.segments.forEach(function (segment) {
20547 tempBuffer.set(segment, offset);
20548 offset += segment.byteLength;
20549 });
20550
20551 try {
20552 destinationBuffer.updating = true;
20553 destinationBuffer.appendBuffer(tempBuffer);
20554 } catch (error) {
20555 if (this.mediaSource_.player_) {
20556 this.mediaSource_.player_.error({
20557 code: -3,
20558 type: 'APPEND_BUFFER_ERR',
20559 message: error.message,
20560 originalError: error
20561 });
20562 }
20563 }
20564 }
20565 }
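
    // Illustrative sketch: two segments of 4 and 8 bytes with segmentObj.bytes
    // === 12 are copied into one Uint8Array at their running offsets, e.g.
    //
    //   var tempBuffer = new Uint8Array(12);
    //   tempBuffer.set(new Uint8Array(4), 0);
    //   tempBuffer.set(new Uint8Array(8), 4);
    //   // destinationBuffer.appendBuffer(tempBuffer) performs a single append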
20566
20567 /**
20568     * Emulate the native mediasource function. Abort any sourceBuffer
20569 * actions and throw out any un-appended data.
20570 *
20571 * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/abort
20572 */
20573 }, {
20574 key: 'abort',
20575 value: function abort() {
20576 if (this.videoBuffer_) {
20577 this.videoBuffer_.abort();
20578 }
20579 if (!this.audioDisabled_ && this.audioBuffer_) {
20580 this.audioBuffer_.abort();
20581 }
20582 if (this.transmuxer_) {
20583 this.transmuxer_.postMessage({ action: 'reset' });
20584 }
20585 this.pendingBuffers_.length = 0;
20586 this.bufferUpdating_ = false;
20587 }
20588 }]);
20589
20590 return VirtualSourceBuffer;
20591})(_videoJs2['default'].EventTarget);
20592
20593exports['default'] = VirtualSourceBuffer;
20594}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
20595},{"./add-text-track-data":64,"./codec-utils":65,"./create-text-tracks-if-necessary":66,"./remove-cues-from-track":72,"./transmuxer-worker":73,"webworkify":76}],76:[function(require,module,exports){
20596var bundleFn = arguments[3];
20597var sources = arguments[4];
20598var cache = arguments[5];
20599
20600var stringify = JSON.stringify;
20601
20602module.exports = function (fn) {
20603 var keys = [];
20604 var wkey;
20605 var cacheKeys = Object.keys(cache);
20606
20607 for (var i = 0, l = cacheKeys.length; i < l; i++) {
20608 var key = cacheKeys[i];
20609 if (cache[key].exports === fn) {
20610 wkey = key;
20611 break;
20612 }
20613 }
20614
20615 if (!wkey) {
20616 wkey = Math.floor(Math.pow(16, 8) * Math.random()).toString(16);
20617 var wcache = {};
20618 for (var i = 0, l = cacheKeys.length; i < l; i++) {
20619 var key = cacheKeys[i];
20620 wcache[key] = key;
20621 }
20622 sources[wkey] = [
20623 Function(['require','module','exports'], '(' + fn + ')(self)'),
20624 wcache
20625 ];
20626 }
20627 var skey = Math.floor(Math.pow(16, 8) * Math.random()).toString(16);
20628
20629 var scache = {}; scache[wkey] = wkey;
20630 sources[skey] = [
20631 Function(['require'],'require(' + stringify(wkey) + ')(self)'),
20632 scache
20633 ];
20634
20635 var src = '(' + bundleFn + ')({'
20636 + Object.keys(sources).map(function (key) {
20637 return stringify(key) + ':['
20638 + sources[key][0]
20639 + ',' + stringify(sources[key][1]) + ']'
20640 ;
20641 }).join(',')
20642 + '},{},[' + stringify(skey) + '])'
20643 ;
20644
20645 var URL = window.URL || window.webkitURL || window.mozURL || window.msURL;
20646
20647 return new Worker(URL.createObjectURL(
20648 new Blob([src], { type: 'text/javascript' })
20649 ));
20650};
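
// Illustrative usage (hypothetical names, not part of the library): webworkify
// serializes a bundled module function into a Blob URL and spins it up as a
// Worker, which is how the VirtualSourceBuffer constructor above creates its
// transmuxer:
//
//   var worker = webworkify(transmuxerWorkerModule);
//   worker.postMessage({ action: 'init', options: {} });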
20651
20652},{}],77:[function(require,module,exports){
20653(function (global){
20654/**
20655 * @file videojs-contrib-hls.js
20656 *
20657 * The main file for the HLS project.
20658 * License: https://github.com/videojs/videojs-contrib-hls/blob/master/LICENSE
20659 */
20660'use strict';
20661
20662var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
20663
20664var _get = function get(_x4, _x5, _x6) { var _again = true; _function: while (_again) { var object = _x4, property = _x5, receiver = _x6; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x4 = parent; _x5 = property; _x6 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
20665
20666function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
20667
20668function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
20669
20670function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
20671
20672var _globalDocument = require('global/document');
20673
20674var _globalDocument2 = _interopRequireDefault(_globalDocument);
20675
20676var _playlistLoader = require('./playlist-loader');
20677
20678var _playlistLoader2 = _interopRequireDefault(_playlistLoader);
20679
20680var _playlist = require('./playlist');
20681
20682var _playlist2 = _interopRequireDefault(_playlist);
20683
20684var _xhr = require('./xhr');
20685
20686var _xhr2 = _interopRequireDefault(_xhr);
20687
20688var _aesDecrypter = require('aes-decrypter');
20689
20690var _binUtils = require('./bin-utils');
20691
20692var _binUtils2 = _interopRequireDefault(_binUtils);
20693
20694var _videojsContribMediaSources = require('videojs-contrib-media-sources');
20695
20696var _m3u8Parser = require('m3u8-parser');
20697
20698var _m3u8Parser2 = _interopRequireDefault(_m3u8Parser);
20699
20700var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
20701
20702var _videoJs2 = _interopRequireDefault(_videoJs);
20703
20704var _masterPlaylistController = require('./master-playlist-controller');
20705
20706var _config = require('./config');
20707
20708var _config2 = _interopRequireDefault(_config);
20709
20710var _renditionMixin = require('./rendition-mixin');
20711
20712var _renditionMixin2 = _interopRequireDefault(_renditionMixin);
20713
20714var _globalWindow = require('global/window');
20715
20716var _globalWindow2 = _interopRequireDefault(_globalWindow);
20717
20718var _playbackWatcher = require('./playback-watcher');
20719
20720var _playbackWatcher2 = _interopRequireDefault(_playbackWatcher);
20721
20722var _reloadSourceOnError = require('./reload-source-on-error');
20723
20724var _reloadSourceOnError2 = _interopRequireDefault(_reloadSourceOnError);
20725
20726var _playlistSelectorsJs = require('./playlist-selectors.js');
20727
20728var Hls = {
20729 PlaylistLoader: _playlistLoader2['default'],
20730 Playlist: _playlist2['default'],
20731 Decrypter: _aesDecrypter.Decrypter,
20732 AsyncStream: _aesDecrypter.AsyncStream,
20733 decrypt: _aesDecrypter.decrypt,
20734 utils: _binUtils2['default'],
20735
20736 STANDARD_PLAYLIST_SELECTOR: _playlistSelectorsJs.lastBandwidthSelector,
20737 INITIAL_PLAYLIST_SELECTOR: _playlistSelectorsJs.lowestBitrateCompatibleVariantSelector,
20738 comparePlaylistBandwidth: _playlistSelectorsJs.comparePlaylistBandwidth,
20739 comparePlaylistResolution: _playlistSelectorsJs.comparePlaylistResolution,
20740
20741 xhr: (0, _xhr2['default'])()
20742};
20743
20744// 0.5 MB/s (4194304 bits per second)
20745var INITIAL_BANDWIDTH = 4194304;
20746
20747// Define getters/setters for config properties
20748['GOAL_BUFFER_LENGTH', 'MAX_GOAL_BUFFER_LENGTH', 'GOAL_BUFFER_LENGTH_RATE', 'BUFFER_LOW_WATER_LINE', 'MAX_BUFFER_LOW_WATER_LINE', 'BUFFER_LOW_WATER_LINE_RATE', 'BANDWIDTH_VARIANCE'].forEach(function (prop) {
20749 Object.defineProperty(Hls, prop, {
20750 get: function get() {
20751      _videoJs2['default'].log.warn('using Hls.' + prop + ' is UNSAFE; be sure you know what you are doing');
20752 return _config2['default'][prop];
20753 },
20754 set: function set(value) {
20755      _videoJs2['default'].log.warn('using Hls.' + prop + ' is UNSAFE; be sure you know what you are doing');
20756
20757 if (typeof value !== 'number' || value < 0) {
20758 _videoJs2['default'].log.warn('value of Hls.' + prop + ' must be greater than or equal to 0');
20759 return;
20760 }
20761
20762 _config2['default'][prop] = value;
20763 }
20764 });
20765});
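
// Illustrative usage (not part of the library): the generated setters validate
// their input and warn that tuning these values is unsafe:
//
//   videojs.Hls.GOAL_BUFFER_LENGTH = 60;  // accepted; logs the UNSAFE warning
//   videojs.Hls.GOAL_BUFFER_LENGTH = -1;  // rejected; value must be >= 0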
20766
20767/**
20768 * Updates the selectedIndex of the QualityLevelList when a mediachange happens in hls.
20769 *
20770 * @param {QualityLevelList} qualityLevels The QualityLevelList to update.
20771 * @param {PlaylistLoader} playlistLoader PlaylistLoader containing the new media info.
20772 * @function handleHlsMediaChange
20773 */
20774var handleHlsMediaChange = function handleHlsMediaChange(qualityLevels, playlistLoader) {
20775 var newPlaylist = playlistLoader.media();
20776 var selectedIndex = -1;
20777
20778 for (var i = 0; i < qualityLevels.length; i++) {
20779 if (qualityLevels[i].id === newPlaylist.uri) {
20780 selectedIndex = i;
20781 break;
20782 }
20783 }
20784
20785 qualityLevels.selectedIndex_ = selectedIndex;
20786 qualityLevels.trigger({
20787 selectedIndex: selectedIndex,
20788 type: 'change'
20789 });
20790};
20791
20792/**
20793 * Adds quality levels to list once playlist metadata is available
20794 *
20795 * @param {QualityLevelList} qualityLevels The QualityLevelList to attach events to.
20796 * @param {Object} hls Hls object to listen to for media events.
20797 * @function handleHlsLoadedMetadata
20798 */
20799var handleHlsLoadedMetadata = function handleHlsLoadedMetadata(qualityLevels, hls) {
20800 hls.representations().forEach(function (rep) {
20801 qualityLevels.addQualityLevel(rep);
20802 });
20803 handleHlsMediaChange(qualityLevels, hls.playlists);
20804};
20805
20806// HLS is a source handler, not a tech. Make sure attempts to use it
20807// as one do not cause exceptions.
20808Hls.canPlaySource = function () {
20809 return _videoJs2['default'].log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
20810};
20811
20812/**
20813 * Whether the browser has built-in HLS support.
20814 */
20815Hls.supportsNativeHls = (function () {
20816 var video = _globalDocument2['default'].createElement('video');
20817
20818 // native HLS is definitely not supported if HTML5 video isn't
20819 if (!_videoJs2['default'].getTech('Html5').isSupported()) {
20820 return false;
20821 }
20822
20823 // HLS manifests can go by many mime-types
20824 var canPlay = [
20825  // Apple sanctioned
20826 'application/vnd.apple.mpegurl',
20827 // Apple sanctioned for backwards compatibility
20828 'audio/mpegurl',
20829 // Very common
20830 'audio/x-mpegurl',
20831 // Very common
20832 'application/x-mpegurl',
20833 // Included for completeness
20834 'video/x-mpegurl', 'video/mpegurl', 'application/mpegurl'];
20835
20836 return canPlay.some(function (canItPlay) {
20837 return (/maybe|probably/i.test(video.canPlayType(canItPlay))
20838 );
20839 });
20840})();
20841
20842/**
20843 * HLS is a source handler, not a tech. Make sure attempts to use it
20844 * as one do not cause exceptions.
20845 */
20846Hls.isSupported = function () {
20847 return _videoJs2['default'].log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
20848};
20849
20850var Component = _videoJs2['default'].getComponent('Component');
20851
20852/**
20853 * The Hls Handler object, where we orchestrate all of the parts
20854 * of HLS to interact with video.js
20855 *
20856 * @class HlsHandler
20857 * @extends videojs.Component
20858 * @param {Object} source the source object
20859 * @param {Tech} tech the parent tech object
20860 * @param {Object} options optional and required options
20861 */
20862
20863var HlsHandler = (function (_Component) {
20864 _inherits(HlsHandler, _Component);
20865
20866 function HlsHandler(source, tech, options) {
20867 var _this = this;
20868
20869 _classCallCheck(this, HlsHandler);
20870
20871 _get(Object.getPrototypeOf(HlsHandler.prototype), 'constructor', this).call(this, tech, options.hls);
20872
20873 // tech.player() is deprecated but setup a reference to HLS for
20874 // backwards-compatibility
20875 if (tech.options_ && tech.options_.playerId) {
20876 var _player = (0, _videoJs2['default'])(tech.options_.playerId);
20877
20878 if (!_player.hasOwnProperty('hls')) {
20879 Object.defineProperty(_player, 'hls', {
20880 get: function get() {
20881 _videoJs2['default'].log.warn('player.hls is deprecated. Use player.tech_.hls instead.');
20882 tech.trigger({ type: 'usage', name: 'hls-player-access' });
20883 return _this;
20884 }
20885 });
20886 }
20887 }
20888
20889 this.tech_ = tech;
20890 this.source_ = source;
20891 this.stats = {};
20892 this.ignoreNextSeekingEvent_ = false;
20893 this.setOptions_();
20894
20895    // overriding native HLS only works if audio tracks have been emulated,
20896    // so error early if we're misconfigured:
20897 if (this.options_.overrideNative && (tech.featuresNativeVideoTracks || tech.featuresNativeAudioTracks)) {
20898 throw new Error('Overriding native HLS requires emulated tracks. ' + 'See https://git.io/vMpjB');
20899 }
20900
20901 // listen for fullscreenchange events for this player so that we
20902 // can adjust our quality selection quickly
20903 this.on(_globalDocument2['default'], ['fullscreenchange', 'webkitfullscreenchange', 'mozfullscreenchange', 'MSFullscreenChange'], function (event) {
20904 var fullscreenElement = _globalDocument2['default'].fullscreenElement || _globalDocument2['default'].webkitFullscreenElement || _globalDocument2['default'].mozFullScreenElement || _globalDocument2['default'].msFullscreenElement;
20905
20906 if (fullscreenElement && fullscreenElement.contains(_this.tech_.el())) {
20907 _this.masterPlaylistController_.fastQualityChange_();
20908 }
20909 });
20910
20911 this.on(this.tech_, 'seeking', function () {
20912 if (this.ignoreNextSeekingEvent_) {
20913 this.ignoreNextSeekingEvent_ = false;
20914 return;
20915 }
20916
20917 this.setCurrentTime(this.tech_.currentTime());
20918 });
20919 this.on(this.tech_, 'error', function () {
20920 if (this.masterPlaylistController_) {
20921 this.masterPlaylistController_.pauseLoading();
20922 }
20923 });
20924
20925 this.on(this.tech_, 'play', this.play);
20926 }
20927
20928 /**
20929 * The Source Handler object, which informs video.js what additional
20930 * MIME types are supported and sets up playback. It is registered
20931 * automatically to the appropriate tech based on the capabilities of
20932 * the browser it is running in. It is not necessary to use or modify
20933 * this object in normal usage.
20934 */
20935
20936 _createClass(HlsHandler, [{
20937 key: 'setOptions_',
20938 value: function setOptions_() {
20939 var _this2 = this;
20940
20941 // defaults
20942 this.options_.withCredentials = this.options_.withCredentials || false;
20943
20944 if (typeof this.options_.blacklistDuration !== 'number') {
20945 this.options_.blacklistDuration = 5 * 60;
20946 }
20947
20948 // start playlist selection at a reasonable bandwidth for
20949 // broadband internet (0.5 MB/s) or mobile (0.0625 MB/s)
20950 if (typeof this.options_.bandwidth !== 'number') {
20951 this.options_.bandwidth = INITIAL_BANDWIDTH;
20952 }
20953
20954      // A custom bandwidth takes precedence over the enableLowInitialPlaylist
20955      // option: it is only honored if bandwidth is unchanged from the initial setting
20956 this.options_.enableLowInitialPlaylist = this.options_.enableLowInitialPlaylist && this.options_.bandwidth === INITIAL_BANDWIDTH;
20957
20958 // grab options passed to player.src
20959 ['withCredentials', 'bandwidth'].forEach(function (option) {
20960 if (typeof _this2.source_[option] !== 'undefined') {
20961 _this2.options_[option] = _this2.source_[option];
20962 }
20963 });
20964
20965 this.bandwidth = this.options_.bandwidth;
20966 }
20967
20968 /**
20969 * called when player.src gets called, handle a new source
20970 *
20971 * @param {Object} src the source object to handle
20972 */
20973 }, {
20974 key: 'src',
20975 value: function src(_src) {
20976 var _this3 = this;
20977
20978 // do nothing if the src is falsey
20979 if (!_src) {
20980 return;
20981 }
20982 this.setOptions_();
20983 // add master playlist controller options
20984 this.options_.url = this.source_.src;
20985 this.options_.tech = this.tech_;
20986 this.options_.externHls = Hls;
20987
20988 this.masterPlaylistController_ = new _masterPlaylistController.MasterPlaylistController(this.options_);
20989 this.playbackWatcher_ = new _playbackWatcher2['default'](_videoJs2['default'].mergeOptions(this.options_, {
20990 seekable: function seekable() {
20991 return _this3.seekable();
20992 }
20993 }));
20994
20995 this.masterPlaylistController_.on('error', function () {
20996 var player = _videoJs2['default'].players[_this3.tech_.options_.playerId];
20997
20998 player.error(_this3.masterPlaylistController_.error);
20999 });
21000
21001 // `this` in selectPlaylist should be the HlsHandler for backwards
21002 // compatibility with < v2
21003 this.masterPlaylistController_.selectPlaylist = this.selectPlaylist ? this.selectPlaylist.bind(this) : Hls.STANDARD_PLAYLIST_SELECTOR.bind(this);
21004
21005 this.masterPlaylistController_.selectInitialPlaylist = Hls.INITIAL_PLAYLIST_SELECTOR.bind(this);
21006
21007 // re-expose some internal objects for backwards compatibility with < v2
21008 this.playlists = this.masterPlaylistController_.masterPlaylistLoader_;
21009 this.mediaSource = this.masterPlaylistController_.mediaSource;
21010
21011 // Proxy assignment of some properties to the master playlist
21012 // controller. Using a custom property for backwards compatibility
21013 // with < v2
21014 Object.defineProperties(this, {
21015 selectPlaylist: {
21016 get: function get() {
21017 return this.masterPlaylistController_.selectPlaylist;
21018 },
21019 set: function set(selectPlaylist) {
21020 this.masterPlaylistController_.selectPlaylist = selectPlaylist.bind(this);
21021 }
21022 },
21023 throughput: {
21024 get: function get() {
21025 return this.masterPlaylistController_.mainSegmentLoader_.throughput.rate;
21026 },
21027 set: function set(throughput) {
21028 this.masterPlaylistController_.mainSegmentLoader_.throughput.rate = throughput;
21029 // By setting `count` to 1 the throughput value becomes the starting value
21030 // for the cumulative average
21031 this.masterPlaylistController_.mainSegmentLoader_.throughput.count = 1;
21032 }
21033 },
21034 bandwidth: {
21035 get: function get() {
21036 return this.masterPlaylistController_.mainSegmentLoader_.bandwidth;
21037 },
21038 set: function set(bandwidth) {
21039 this.masterPlaylistController_.mainSegmentLoader_.bandwidth = bandwidth;
21040 // setting the bandwidth manually resets the throughput counter
21041          // `count` is set to zero so that the current value of `rate` isn't included
21042 // in the cumulative average
21043 this.masterPlaylistController_.mainSegmentLoader_.throughput = {
21044 rate: 0,
21045 count: 0
21046 };
21047 }
21048 },
21049 /**
21050       * `systemBandwidth` is a combination of two serial processes' bit-rates. The first
21051 * is the network bitrate provided by `bandwidth` and the second is the bitrate of
21052 * the entire process after that - decryption, transmuxing, and appending - provided
21053 * by `throughput`.
21054 *
21055       * Since the two processes are serial, the overall system bandwidth is given by:
21056 * sysBandwidth = 1 / (1 / bandwidth + 1 / throughput)
21057 */
21058 systemBandwidth: {
21059 get: function get() {
21060 var invBandwidth = 1 / (this.bandwidth || 1);
21061 var invThroughput = undefined;
21062
21063 if (this.throughput > 0) {
21064 invThroughput = 1 / this.throughput;
21065 } else {
21066 invThroughput = 0;
21067 }
21068
21069 var systemBitrate = Math.floor(1 / (invBandwidth + invThroughput));
21070
21071 return systemBitrate;
21072 },
21073 set: function set() {
21074 _videoJs2['default'].log.error('The "systemBandwidth" property is read-only');
21075 }
21076 }
21077 });
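
      // Worked example (illustrative): with bandwidth = 4194304 bits/s and
      // throughput = 8388608 bits/s,
      //   systemBandwidth = Math.floor(1 / (1 / 4194304 + 1 / 8388608))
      //                   = Math.floor(8388608 / 3) = 2796202
      // so the slower of the two serial stages dominates the estimate.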
21078
21079 Object.defineProperties(this.stats, {
21080 bandwidth: {
21081 get: function get() {
21082 return _this3.bandwidth || 0;
21083 },
21084 enumerable: true
21085 },
21086 mediaRequests: {
21087 get: function get() {
21088 return _this3.masterPlaylistController_.mediaRequests_() || 0;
21089 },
21090 enumerable: true
21091 },
21092 mediaRequestsAborted: {
21093 get: function get() {
21094 return _this3.masterPlaylistController_.mediaRequestsAborted_() || 0;
21095 },
21096 enumerable: true
21097 },
21098 mediaRequestsTimedout: {
21099 get: function get() {
21100 return _this3.masterPlaylistController_.mediaRequestsTimedout_() || 0;
21101 },
21102 enumerable: true
21103 },
21104 mediaRequestsErrored: {
21105 get: function get() {
21106 return _this3.masterPlaylistController_.mediaRequestsErrored_() || 0;
21107 },
21108 enumerable: true
21109 },
21110 mediaTransferDuration: {
21111 get: function get() {
21112 return _this3.masterPlaylistController_.mediaTransferDuration_() || 0;
21113 },
21114 enumerable: true
21115 },
21116 mediaBytesTransferred: {
21117 get: function get() {
21118 return _this3.masterPlaylistController_.mediaBytesTransferred_() || 0;
21119 },
21120 enumerable: true
21121 },
21122 mediaSecondsLoaded: {
21123 get: function get() {
21124 return _this3.masterPlaylistController_.mediaSecondsLoaded_() || 0;
21125 },
21126 enumerable: true
21127 }
21128 });
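
      // Illustrative usage (not part of the library): the stats object exposes
      // read-only, enumerable counters, e.g.
      //
      //   player.tech_.hls.stats.bandwidth;             // current estimate, bits/s
      //   player.tech_.hls.stats.mediaBytesTransferred; // cumulative bytes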
21129
21130 this.tech_.one('canplay', this.masterPlaylistController_.setupFirstPlay.bind(this.masterPlaylistController_));
21131
21132 this.masterPlaylistController_.on('selectedinitialmedia', function () {
21133 // Add the manual rendition mix-in to HlsHandler
21134 (0, _renditionMixin2['default'])(_this3);
21135 });
21136
21137 // the bandwidth of the primary segment loader is our best
21138 // estimate of overall bandwidth
21139 this.on(this.masterPlaylistController_, 'progress', function () {
21140 this.tech_.trigger('progress');
21141 });
21142
21143 // In the live case, we need to ignore the very first `seeking` event since
21144 // that will be the result of the seek-to-live behavior
21145 this.on(this.masterPlaylistController_, 'firstplay', function () {
21146 this.ignoreNextSeekingEvent_ = true;
21147 });
21148
21149 this.tech_.ready(function () {
21150 return _this3.setupQualityLevels_();
21151 });
21152
21153 // do nothing if the tech has been disposed already
21154 // this can occur if someone sets the src in player.ready(), for instance
21155 if (!this.tech_.el()) {
21156 return;
21157 }
21158
21159 this.tech_.src(_videoJs2['default'].URL.createObjectURL(this.masterPlaylistController_.mediaSource));
21160 }
21161
21162 /**
21163 * Initializes the quality levels and sets listeners to update them.
21164 *
21165 * @method setupQualityLevels_
21166 * @private
21167 */
21168 }, {
21169 key: 'setupQualityLevels_',
21170 value: function setupQualityLevels_() {
21171 var _this4 = this;
21172
21173 var player = _videoJs2['default'].players[this.tech_.options_.playerId];
21174
21175 if (player && player.qualityLevels) {
21176 this.qualityLevels_ = player.qualityLevels();
21177
21178 this.masterPlaylistController_.on('selectedinitialmedia', function () {
21179 handleHlsLoadedMetadata(_this4.qualityLevels_, _this4);
21180 });
21181
21182 this.playlists.on('mediachange', function () {
21183 handleHlsMediaChange(_this4.qualityLevels_, _this4.playlists);
21184 });
21185 }
21186 }
21187
21188 /**
21189 * Begin playing the video.
21190 */
21191 }, {
21192 key: 'play',
21193 value: function play() {
21194 this.masterPlaylistController_.play();
21195 }
21196
21197 /**
21198 * a wrapper around the function in MasterPlaylistController
21199 */
21200 }, {
21201 key: 'setCurrentTime',
21202 value: function setCurrentTime(currentTime) {
21203 this.masterPlaylistController_.setCurrentTime(currentTime);
21204 }
21205
21206 /**
21207 * a wrapper around the function in MasterPlaylistController
21208 */
21209 }, {
21210 key: 'duration',
21211 value: function duration() {
21212 return this.masterPlaylistController_.duration();
21213 }
21214
21215 /**
21216 * a wrapper around the function in MasterPlaylistController
21217 */
21218 }, {
21219 key: 'seekable',
21220 value: function seekable() {
21221 return this.masterPlaylistController_.seekable();
21222 }
21223
21224 /**
21225 * Abort all outstanding work and cleanup.
21226 */
21227 }, {
21228 key: 'dispose',
21229 value: function dispose() {
21230 if (this.playbackWatcher_) {
21231 this.playbackWatcher_.dispose();
21232 }
21233 if (this.masterPlaylistController_) {
21234 this.masterPlaylistController_.dispose();
21235 }
21236 if (this.qualityLevels_) {
21237 this.qualityLevels_.dispose();
21238 }
21239 _get(Object.getPrototypeOf(HlsHandler.prototype), 'dispose', this).call(this);
21240 }
21241 }]);
21242
21243 return HlsHandler;
21244})(Component);
21245
21246var HlsSourceHandler = function HlsSourceHandler(mode) {
21247 return {
21248 canHandleSource: function canHandleSource(srcObj) {
21249 var options = arguments.length <= 1 || arguments[1] === undefined ? {} : arguments[1];
21250
21251 var localOptions = _videoJs2['default'].mergeOptions(_videoJs2['default'].options, options);
21252
21253      // this forces video.js to skip this tech/mode if it's not the one we have been
21254      // overridden to use, by returning that we cannot handle the source.
21255 if (localOptions.hls && localOptions.hls.mode && localOptions.hls.mode !== mode) {
21256 return false;
21257 }
21258 return HlsSourceHandler.canPlayType(srcObj.type, localOptions);
21259 },
21260 handleSource: function handleSource(source, tech) {
21261 var options = arguments.length <= 2 || arguments[2] === undefined ? {} : arguments[2];
21262
21263 var localOptions = _videoJs2['default'].mergeOptions(_videoJs2['default'].options, options, { hls: { mode: mode } });
21264
21265 if (mode === 'flash') {
21266 // We need to trigger this asynchronously to give others the chance
21267 // to bind to the event when a source is set at player creation
21268 tech.setTimeout(function () {
21269 tech.trigger('loadstart');
21270 }, 1);
21271 }
21272
21273 tech.hls = new HlsHandler(source, tech, localOptions);
21274 tech.hls.xhr = (0, _xhr2['default'])();
21275
21276 tech.hls.src(source.src);
21277 return tech.hls;
21278 },
21279 canPlayType: function canPlayType(type) {
21280 var options = arguments.length <= 1 || arguments[1] === undefined ? {} : arguments[1];
21281
21282 var localOptions = _videoJs2['default'].mergeOptions(_videoJs2['default'].options, options);
21283
21284 if (HlsSourceHandler.canPlayType(type, localOptions)) {
21285 return 'maybe';
21286 }
21287 return '';
21288 }
21289 };
21290};
21291
21292HlsSourceHandler.canPlayType = function (type, options) {
21293 // No support for IE 10 or below
21294 if (_videoJs2['default'].browser.IE_VERSION && _videoJs2['default'].browser.IE_VERSION <= 10) {
21295 return false;
21296 }
21297
21298 var mpegurlRE = /^(audio|video|application)\/(x-|vnd\.apple\.)?mpegurl/i;
21299
21300 // favor native HLS support if it's available
21301 if (!options.hls.overrideNative && Hls.supportsNativeHls) {
21302 return false;
21303 }
21304 return mpegurlRE.test(type);
21305};
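
// Illustrative sketch: the mpegurlRE pattern above accepts the common HLS mime
// types, subject to the IE <= 10 and native-HLS guards, e.g.
//
//   mpegurlRE.test('application/x-mpegURL');          // => true
//   mpegurlRE.test('application/vnd.apple.mpegurl');  // => true
//   mpegurlRE.test('video/mp4');                      // => false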
21306
21307if (typeof _videoJs2['default'].MediaSource === 'undefined' || typeof _videoJs2['default'].URL === 'undefined') {
21308 _videoJs2['default'].MediaSource = _videojsContribMediaSources.MediaSource;
21309 _videoJs2['default'].URL = _videojsContribMediaSources.URL;
21310}
21311
21312var flashTech = _videoJs2['default'].getTech('Flash');
21313
21314// register source handlers with the appropriate techs
21315if (_videojsContribMediaSources.MediaSource.supportsNativeMediaSources()) {
21316 _videoJs2['default'].getTech('Html5').registerSourceHandler(HlsSourceHandler('html5'), 0);
21317}
21318if (_globalWindow2['default'].Uint8Array && flashTech) {
21319 flashTech.registerSourceHandler(HlsSourceHandler('flash'));
21320}
21321
21322_videoJs2['default'].HlsHandler = HlsHandler;
21323_videoJs2['default'].HlsSourceHandler = HlsSourceHandler;
21324_videoJs2['default'].Hls = Hls;
21325if (!_videoJs2['default'].use) {
21326 _videoJs2['default'].registerComponent('Hls', Hls);
21327}
21328_videoJs2['default'].m3u8 = _m3u8Parser2['default'];
21329_videoJs2['default'].options.hls = _videoJs2['default'].options.hls || {};
21330
21331if (_videoJs2['default'].registerPlugin) {
21332 _videoJs2['default'].registerPlugin('reloadSourceOnError', _reloadSourceOnError2['default']);
21333} else {
21334 _videoJs2['default'].plugin('reloadSourceOnError', _reloadSourceOnError2['default']);
21335}
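
// Illustrative usage (not part of the library): once registered, the plugin can
// be enabled per player; errorInterval (seconds) throttles how often the source
// may be reloaded:
//
//   player.reloadSourceOnError({ errorInterval: 30 });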
21336
21337module.exports = {
21338 Hls: Hls,
21339 HlsHandler: HlsHandler,
21340 HlsSourceHandler: HlsSourceHandler
21341};
21342}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
21343},{"./bin-utils":2,"./config":3,"./master-playlist-controller":5,"./playback-watcher":8,"./playlist":11,"./playlist-loader":9,"./playlist-selectors.js":10,"./reload-source-on-error":13,"./rendition-mixin":14,"./xhr":21,"aes-decrypter":25,"global/document":31,"global/window":32,"m3u8-parser":33,"videojs-contrib-media-sources":74}]},{},[77])(77)
21344});