UNPKG

628 kB · JavaScript · View Raw
1/**
2 * @videojs/http-streaming
3 * @version 1.13.4
4 * @copyright 2020 Brightcove, Inc
5 * @license Apache-2.0
6 */
7import URLToolkit from 'url-toolkit';
8import window$1 from 'global/window';
9import videojs from 'video.js';
10import { Parser } from 'm3u8-parser';
11import document from 'global/document';
12import { parse, parseUTCTiming } from 'mpd-parser';
13import mp4Inspector from 'mux.js/lib/tools/mp4-inspector';
14import mp4probe from 'mux.js/lib/mp4/probe';
15import CaptionParser from 'mux.js/lib/mp4/caption-parser';
16import tsInspector from 'mux.js/lib/tools/ts-inspector.js';
17import { Decrypter, AsyncStream, decrypt } from 'aes-decrypter';
18
19/**
20 * @file resolve-url.js - Handling how URLs are resolved and manipulated
21 */
22
/**
 * Resolve a possibly-relative URL against a base URL. When the base URL is
 * itself relative, it is first anchored to the current page location.
 *
 * @param {String} baseURL the URL to resolve against
 * @param {String} relativeURL the URL to make absolute
 * @return {String} the resolved absolute URL
 */
var resolveUrl = function resolveUrl(baseURL, relativeURL) {
  // a URL that already carries a scheme needs no resolution
  var hasScheme = /^[a-z]+:/i.test(relativeURL);

  if (hasScheme) {
    return relativeURL;
  }

  // a base URL without '//' is relative; anchor it to the page first
  var absoluteBase = /\/\//i.test(baseURL) ? baseURL : URLToolkit.buildAbsoluteURL(window$1.location.href, baseURL);

  return URLToolkit.buildAbsoluteURL(absoluteBase, relativeURL);
};
36
/**
 * Checks whether an xhr request was redirected and returns the correct url
 * depending on the `handleManifestRedirects` option.
 *
 * @api private
 *
 * @param {Boolean} handleManifestRedirect whether redirects should be followed
 * @param {String} url the url that was requested
 * @param {XMLHttpRequest} req the xhr request result
 *
 * @return {String} the redirect target when one applies, otherwise `url`
 */
var resolveManifestRedirect = function resolveManifestRedirect(handleManifestRedirect, url, req) {
  // responseURL semantics:
  // - https://fetch.spec.whatwg.org/#concept-response-url
  // - https://fetch.spec.whatwg.org/#atomic-http-redirect-handling
  var wasRedirected = handleManifestRedirect && req.responseURL && url !== req.responseURL;

  return wasRedirected ? req.responseURL : url;
};
58
// Babel helper: enforce `new`-invocation of a transpiled class constructor.
var classCallCheck = function (instance, Constructor) {
  var isInstance = instance instanceof Constructor;

  if (!isInstance) {
    throw new TypeError("Cannot call a class as a function");
  }
};
64
// Babel helper: install prototype and static members from descriptor lists.
// Members are non-enumerable, configurable, and writable when data-valued.
var createClass = function () {
  function defineProperties(target, props) {
    props.forEach(function (descriptor) {
      descriptor.enumerable = descriptor.enumerable || false;
      descriptor.configurable = true;
      if ("value" in descriptor) descriptor.writable = true;
      Object.defineProperty(target, descriptor.key, descriptor);
    });
  }

  return function (Constructor, protoProps, staticProps) {
    if (protoProps) defineProperties(Constructor.prototype, protoProps);
    if (staticProps) defineProperties(Constructor, staticProps);
    return Constructor;
  };
}();
82
// Babel helper: `super.property` lookup — walk the prototype chain for the
// property and invoke accessors against `receiver`.
var get = function get(object, property, receiver) {
  if (object === null) object = Function.prototype;

  var desc = Object.getOwnPropertyDescriptor(object, property);

  if (desc === undefined) {
    // not an own property; keep walking up the prototype chain
    var parent = Object.getPrototypeOf(object);

    return parent === null ? undefined : get(parent, property, receiver);
  }

  // plain data property
  if ("value" in desc) {
    return desc.value;
  }

  // accessor property: call the getter with the original receiver
  var getter = desc.get;

  return getter === undefined ? undefined : getter.call(receiver);
};
107
// Babel helper: wire up prototype-chain inheritance between two constructors,
// including static inheritance via setPrototypeOf/__proto__.
var inherits = function (subClass, superClass) {
  if (typeof superClass !== "function" && superClass !== null) {
    throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);
  }

  var superProto = superClass ? superClass.prototype : null;

  subClass.prototype = Object.create(superProto, {
    constructor: {
      value: subClass,
      enumerable: false,
      writable: true,
      configurable: true
    }
  });

  if (superClass) {
    if (Object.setPrototypeOf) {
      Object.setPrototypeOf(subClass, superClass);
    } else {
      subClass.__proto__ = superClass;
    }
  }
};
123
// Babel helper: resolve the value a transpiled constructor should return —
// the super() call result when it is object-like, otherwise `this`.
var possibleConstructorReturn = function (self, call) {
  if (!self) {
    throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
  }

  var callIsObjectLike = typeof call === "object" || typeof call === "function";

  return call && callIsObjectLike ? call : self;
};
131
// Babel helper: array-destructuring support. Arrays pass through untouched;
// other iterables are consumed up to `i` values (honoring early `return()`);
// non-iterables throw.
var slicedToArray = function () {
  // pull at most `i` values out of an iterable
  function sliceIterator(arr, i) {
    var values = [];
    var finishedNormally = true;
    var caught = false;
    var thrown;

    try {
      var iterator = arr[Symbol.iterator]();
      var step = iterator.next();

      while (!(finishedNormally = step.done)) {
        values.push(step.value);

        if (i && values.length === i) break;

        step = iterator.next();
      }
    } catch (e) {
      caught = true;
      thrown = e;
    } finally {
      try {
        // close the iterator when we stopped before exhaustion
        if (!finishedNormally && iterator["return"]) iterator["return"]();
      } finally {
        if (caught) throw thrown;
      }
    }

    return values;
  }

  return function (arr, i) {
    if (Array.isArray(arr)) {
      return arr;
    }

    if (Symbol.iterator in Object(arr)) {
      return sliceIterator(arr, i);
    }

    throw new TypeError("Invalid attempt to destructure non-iterable instance");
  };
}();
169
170/**
171 * @file playlist-loader.js
172 *
173 * A state machine that manages the loading, caching, and updating of
174 * M3U8 playlists.
175 *
176 */
177
// pull the videojs utilities this module relies on off the namespace
var mergeOptions = videojs.mergeOptions;
var EventTarget = videojs.EventTarget;
var log = videojs.log;
181
/**
 * Loops through all supported media groups in master and calls the provided
 * callback for each group.
 *
 * @param {Object} master
 *        The parsed master manifest object
 * @param {Function} callback
 *        Called with (mediaProperties, mediaType, groupKey, labelKey) for
 *        each AUDIO and SUBTITLES group entry
 */
var forEachMediaGroup = function forEachMediaGroup(master, callback) {
  ['AUDIO', 'SUBTITLES'].forEach(function (mediaType) {
    var groups = master.mediaGroups[mediaType];

    for (var groupKey in groups) {
      var group = groups[groupKey];

      for (var labelKey in group) {
        callback(group[labelKey], mediaType, groupKey, labelKey);
      }
    }
  });
};
203
/**
 * Returns a new array of segments that is the result of merging
 * properties from an older list of segments onto an updated
 * list. No properties on the updated playlist will be overridden.
 *
 * @param {Array} original the outdated list of segments
 * @param {Array} update the updated list of segments
 * @param {Number=} offset the index of the first update
 * segment in the original segment list. For non-live playlists,
 * this should always be zero and does not need to be
 * specified. For live playlists, it should be the difference
 * between the media sequence numbers in the original and updated
 * playlists.
 * @return a list of merged segment objects
 */
var updateSegments = function updateSegments(original, update, offset) {
  offset = offset || 0;

  var result = update.slice();
  // only indices present in both lists can be merged
  var stop = Math.min(original.length, update.length + offset);

  for (var i = offset; i < stop; i++) {
    var updatedIndex = i - offset;

    result[updatedIndex] = mergeOptions(original[i], result[updatedIndex]);
  }

  return result;
};
230
// Resolve the segment URI plus its key and init-segment (map) URIs against
// the playlist's base URI, caching each result so the work happens only once.
var resolveSegmentUris = function resolveSegmentUris(segment, baseUri) {
  if (!segment.resolvedUri) {
    segment.resolvedUri = resolveUrl(baseUri, segment.uri);
  }

  var key = segment.key;

  if (key && !key.resolvedUri) {
    key.resolvedUri = resolveUrl(baseUri, key.uri);
  }

  var map = segment.map;

  if (map && !map.resolvedUri) {
    map.resolvedUri = resolveUrl(baseUri, map.uri);
  }
};
242
/**
 * Returns a new master playlist that is the result of merging an
 * updated media playlist into the original version. If the
 * updated media playlist does not match any of the playlist
 * entries in the original master playlist, null is returned.
 *
 * @param {Object} master a parsed master M3U8 object
 * @param {Object} media a parsed media M3U8 object
 * @return {Object} a new object that represents the original
 * master playlist with the updated media playlist merged in, or
 * null if the merge produced no change.
 */
var updateMaster = function updateMaster(master, media) {
  // shallow-clone the master so callers comparing old vs new see a
  // distinct object when something changed
  var result = mergeOptions(master, {});
  var playlist = result.playlists[media.id];

  // the updated playlist must already be known to the master
  if (!playlist) {
    return null;
  }

  // consider the playlist unchanged if the number of segments is equal, the media
  // sequence number is unchanged, and this playlist hasn't become the end of the playlist
  if (playlist.segments && media.segments && playlist.segments.length === media.segments.length && playlist.endList === media.endList && playlist.mediaSequence === media.mediaSequence) {
    return null;
  }

  var mergedPlaylist = mergeOptions(playlist, media);

  // if the update could overlap existing segment information, merge the two segment lists
  if (playlist.segments) {
    mergedPlaylist.segments = updateSegments(playlist.segments, media.segments, media.mediaSequence - playlist.mediaSequence);
  }

  // resolve any segment URIs to prevent us from having to do it later
  mergedPlaylist.segments.forEach(function (segment) {
    resolveSegmentUris(segment, mergedPlaylist.resolvedUri);
  });

  // TODO Right now in the playlists array there are two references to each playlist, one
  // that is referenced by index, and one by URI. The index reference may no longer be
  // necessary.
  for (var i = 0; i < result.playlists.length; i++) {
    if (result.playlists[i].id === media.id) {
      result.playlists[i] = mergedPlaylist;
    }
  }
  // id reference (also used as the canonical lookup key elsewhere)
  result.playlists[media.id] = mergedPlaylist;
  // URI reference added for backwards compatibility
  result.playlists[media.uri] = mergedPlaylist;

  return result;
};
295
// Build the canonical playlist id from its index and URI, e.g. "0-http://…".
var createPlaylistID = function createPlaylistID(index, uri) {
  return [index, uri].join('-');
};
299
// Walk the master's playlists, resolving each playlist URI, assigning ids,
// and installing by-id/by-URI lookups alongside the index entries.
var setupMediaPlaylists = function setupMediaPlaylists(master) {
  for (var i = master.playlists.length - 1; i >= 0; i--) {
    var playlist = master.playlists[i];

    playlist.resolvedUri = resolveUrl(master.uri, playlist.uri);
    playlist.id = createPlaylistID(i, playlist.uri);

    master.playlists[playlist.id] = playlist;
    // URI reference added for backwards compatibility
    master.playlists[playlist.uri] = playlist;

    if (!playlist.attributes) {
      // Although the spec states an #EXT-X-STREAM-INF tag MUST have a
      // BANDWIDTH attribute, we can play the stream without it. This means a poorly
      // formatted master playlist may not have an attribute list. An attributes
      // property is added here to prevent undefined references when we encounter
      // this scenario.
      playlist.attributes = {};

      log.warn('Invalid playlist STREAM-INF detected. Missing BANDWIDTH attribute.');
    }
  }
};
326
// Resolve the URI of every AUDIO/SUBTITLES media-group entry that has one.
var resolveMediaGroupUris = function resolveMediaGroupUris(master) {
  forEachMediaGroup(master, function (properties) {
    if (!properties.uri) {
      return;
    }

    properties.resolvedUri = resolveUrl(master.uri, properties.uri);
  });
};
334
/**
 * Calculates the time to wait before refreshing a live playlist.
 *
 * @param {Object} media
 *        The current media
 * @param {Boolean} update
 *        True if there were any updates from the last refresh, false otherwise
 * @return {Number}
 *        The time in ms to wait before refreshing the live playlist
 */
var refreshDelay = function refreshDelay(media, update) {
  var lastSegment = media.segments[media.segments.length - 1];

  // when the playlist changed, wait one last-segment duration
  if (update && lastSegment && lastSegment.duration) {
    return lastSegment.duration * 1000;
  }

  // if the playlist is unchanged since the last reload or the last segment
  // duration cannot be determined, try again after half the target duration
  return (media.targetDuration || 10) * 500;
};
358
/**
 * Load a playlist from a remote location
 *
 * A state machine over the states HAVE_NOTHING, HAVE_MASTER, HAVE_METADATA,
 * HAVE_CURRENT_METADATA, and SWITCHING_MEDIA that manages loading, caching,
 * and periodic refresh of M3U8 playlists.
 *
 * @class PlaylistLoader
 * @extends Stream
 * @param {String} srcUrl the url to start with
 * @param {Boolean} withCredentials the withCredentials xhr option
 * @constructor
 */

var PlaylistLoader = function (_EventTarget) {
  inherits(PlaylistLoader, _EventTarget);

  function PlaylistLoader(srcUrl, hls) {
    // optional third argument: { withCredentials, handleManifestRedirects }
    var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
    classCallCheck(this, PlaylistLoader);

    var _this = possibleConstructorReturn(this, (PlaylistLoader.__proto__ || Object.getPrototypeOf(PlaylistLoader)).call(this));

    // transpiled destructuring of the options object with defaults
    var _options$withCredenti = options.withCredentials,
        withCredentials = _options$withCredenti === undefined ? false : _options$withCredenti,
        _options$handleManife = options.handleManifestRedirects,
        handleManifestRedirects = _options$handleManife === undefined ? false : _options$handleManife;

    _this.srcUrl = srcUrl;
    _this.hls_ = hls;
    _this.withCredentials = withCredentials;
    _this.handleManifestRedirects = handleManifestRedirects;

    var hlsOptions = hls.options_;

    // user-supplied m3u8-parser extensions, if any
    _this.customTagParsers = hlsOptions && hlsOptions.customTagParsers || [];
    _this.customTagMappers = hlsOptions && hlsOptions.customTagMappers || [];

    if (!_this.srcUrl) {
      throw new Error('A non-empty playlist URL is required');
    }

    // initialize the loader state
    _this.state = 'HAVE_NOTHING';

    // live playlist staleness timeout: re-request the active media playlist
    // when 'mediaupdatetimeout' fires
    _this.on('mediaupdatetimeout', function () {
      if (_this.state !== 'HAVE_METADATA') {
        // only refresh the media playlist if no other activity is going on
        return;
      }

      _this.state = 'HAVE_CURRENT_METADATA';

      _this.request = _this.hls_.xhr({
        uri: resolveUrl(_this.master.uri, _this.media().uri),
        withCredentials: _this.withCredentials
      }, function (error, req) {
        // disposed
        if (!_this.request) {
          return;
        }

        if (error) {
          return _this.playlistRequestError(_this.request, _this.media(), 'HAVE_METADATA');
        }

        _this.haveMetadata(_this.request, _this.media().uri, _this.media().id);
      });
    });
    return _this;
  }

  createClass(PlaylistLoader, [{
    key: 'playlistRequestError',

    /**
     * Record a playlist request failure on `this.error` and emit 'error',
     * optionally restoring the loader to the state it was in before the
     * request began.
     *
     * @param {XMLHttpRequest} xhr the failed request
     * @param {Object} playlist the playlist that was being requested
     * @param {String=} startingState state to restore before erroring
     */
    value: function playlistRequestError(xhr, playlist, startingState) {
      var uri = playlist.uri,
          id = playlist.id;

      // any in-flight request is now finished

      this.request = null;

      if (startingState) {
        this.state = startingState;
      }

      this.error = {
        playlist: this.master.playlists[id],
        status: xhr.status,
        message: 'HLS playlist request error at URL: ' + uri + '.',
        responseText: xhr.responseText,
        // 4 = MEDIA_ERR_ABORTED-class fatal for 5xx, 2 = MEDIA_ERR_NETWORK otherwise
        code: xhr.status >= 500 ? 4 : 2
      };

      this.trigger('error');
    }

    // update the playlist loader's state in response to a new or
    // updated playlist.

  }, {
    key: 'haveMetadata',

    /**
     * Parse a media playlist response, merge it into the master, and
     * schedule the next live refresh when the playlist is not ended.
     *
     * @param {XMLHttpRequest} xhr the completed request
     * @param {String} url the url the playlist was requested from
     * @param {String} id the id of the playlist within the master
     */
    value: function haveMetadata(xhr, url, id) {
      var _this2 = this;

      // any in-flight request is now finished
      this.request = null;
      this.state = 'HAVE_METADATA';

      var parser = new Parser();

      // adding custom tag parsers
      this.customTagParsers.forEach(function (customParser) {
        return parser.addParser(customParser);
      });

      // adding custom tag mappers
      this.customTagMappers.forEach(function (mapper) {
        return parser.addTagMapper(mapper);
      });

      parser.push(xhr.responseText);
      parser.end();
      parser.manifest.uri = url;
      parser.manifest.id = id;
      // m3u8-parser does not attach an attributes property to media playlists so make
      // sure that the property is attached to avoid undefined reference errors
      parser.manifest.attributes = parser.manifest.attributes || {};

      // merge this playlist into the master; null means no change
      var update = updateMaster(this.master, parser.manifest);

      this.targetDuration = parser.manifest.targetDuration;

      if (update) {
        this.master = update;
        this.media_ = this.master.playlists[id];
      } else {
        this.trigger('playlistunchanged');
      }

      // refresh live playlists after a target duration passes
      if (!this.media().endList) {
        window$1.clearTimeout(this.mediaUpdateTimeout);
        this.mediaUpdateTimeout = window$1.setTimeout(function () {
          _this2.trigger('mediaupdatetimeout');
        }, refreshDelay(this.media(), !!update));
      }

      this.trigger('loadedplaylist');
    }

    /**
     * Abort any outstanding work and clean up.
     */

  }, {
    key: 'dispose',
    value: function dispose() {
      this.trigger('dispose');
      this.stopRequest();
      window$1.clearTimeout(this.mediaUpdateTimeout);
      window$1.clearTimeout(this.finalRenditionTimeout);
      this.off();
    }
  }, {
    key: 'stopRequest',

    /**
     * Abort the in-flight request, if any, detaching its callback first so
     * the abort does not fire the loader's response handler.
     */
    value: function stopRequest() {
      if (this.request) {
        var oldRequest = this.request;

        this.request = null;
        oldRequest.onreadystatechange = null;
        oldRequest.abort();
      }
    }

    /**
     * When called without any arguments, returns the currently
     * active media playlist. When called with a single argument,
     * triggers the playlist loader to asynchronously switch to the
     * specified media playlist. Calling this method while the
     * loader is in the HAVE_NOTHING causes an error to be emitted
     * but otherwise has no effect.
     *
     * @param {Object=} playlist the parsed media playlist
     * object to switch to
     * @param {Boolean=} isFinalRendition is this the last available playlist
     *
     * @return {Playlist} the current loaded media
     */

  }, {
    key: 'media',
    value: function media(playlist, isFinalRendition) {
      var _this3 = this;

      // getter
      if (!playlist) {
        return this.media_;
      }

      // setter
      if (this.state === 'HAVE_NOTHING') {
        throw new Error('Cannot switch media playlist from ' + this.state);
      }

      // find the playlist object if the target playlist has been
      // specified by URI
      if (typeof playlist === 'string') {
        if (!this.master.playlists[playlist]) {
          throw new Error('Unknown playlist URI: ' + playlist);
        }
        playlist = this.master.playlists[playlist];
      }

      window$1.clearTimeout(this.finalRenditionTimeout);

      if (isFinalRendition) {
        // retry the final rendition after half a target duration
        // (or 5s when the target duration is unknown)
        var delay = playlist.targetDuration / 2 * 1000 || 5 * 1000;

        this.finalRenditionTimeout = window$1.setTimeout(this.media.bind(this, playlist, false), delay);
        return;
      }

      var startingState = this.state;
      var mediaChange = !this.media_ || playlist.id !== this.media_.id;

      // switch to fully loaded playlists immediately
      if (this.master.playlists[playlist.id].endList) {
        // abort outstanding playlist requests
        if (this.request) {
          this.request.onreadystatechange = null;
          this.request.abort();
          this.request = null;
        }
        this.state = 'HAVE_METADATA';
        this.media_ = playlist;

        // trigger media change if the active media has been updated
        if (mediaChange) {
          this.trigger('mediachanging');
          this.trigger('mediachange');
        }
        return;
      }

      // switching to the active playlist is a no-op
      if (!mediaChange) {
        return;
      }

      this.state = 'SWITCHING_MEDIA';

      // there is already an outstanding playlist request
      if (this.request) {
        if (playlist.resolvedUri === this.request.url) {
          // requesting to switch to the same playlist multiple times
          // has no effect after the first
          return;
        }
        this.request.onreadystatechange = null;
        this.request.abort();
        this.request = null;
      }

      // request the new playlist
      if (this.media_) {
        this.trigger('mediachanging');
      }

      this.request = this.hls_.xhr({
        uri: playlist.resolvedUri,
        withCredentials: this.withCredentials
      }, function (error, req) {
        // disposed
        if (!_this3.request) {
          return;
        }

        // follow a redirect, if the option is enabled
        playlist.resolvedUri = resolveManifestRedirect(_this3.handleManifestRedirects, playlist.resolvedUri, req);

        if (error) {
          return _this3.playlistRequestError(_this3.request, playlist, startingState);
        }

        _this3.haveMetadata(req, playlist.uri, playlist.id);

        // fire loadedmetadata the first time a media playlist is loaded
        if (startingState === 'HAVE_MASTER') {
          _this3.trigger('loadedmetadata');
        } else {
          _this3.trigger('mediachange');
        }
      });
    }

    /**
     * pause loading of the playlist
     */

  }, {
    key: 'pause',
    value: function pause() {
      this.stopRequest();
      window$1.clearTimeout(this.mediaUpdateTimeout);
      if (this.state === 'HAVE_NOTHING') {
        // If we pause the loader before any data has been retrieved, its as if we never
        // started, so reset to an unstarted state.
        this.started = false;
      }
      // Need to restore state now that no activity is happening
      if (this.state === 'SWITCHING_MEDIA') {
        // if the loader was in the process of switching media, it should either return to
        // HAVE_MASTER or HAVE_METADATA depending on if the loader has loaded a media
        // playlist yet. This is determined by the existence of loader.media_
        if (this.media_) {
          this.state = 'HAVE_METADATA';
        } else {
          this.state = 'HAVE_MASTER';
        }
      } else if (this.state === 'HAVE_CURRENT_METADATA') {
        this.state = 'HAVE_METADATA';
      }
    }

    /**
     * start loading of the playlist
     */

  }, {
    key: 'load',

    /**
     * Resume (or begin) loading. With isFinalRendition, defer via a timeout
     * instead of loading immediately.
     *
     * @param {Boolean=} isFinalRendition whether this is the last playlist available
     */
    value: function load(isFinalRendition) {
      var _this4 = this;

      window$1.clearTimeout(this.mediaUpdateTimeout);

      var media = this.media();

      if (isFinalRendition) {
        // retry after half a target duration (or 5s if unknown)
        var delay = media ? media.targetDuration / 2 * 1000 : 5 * 1000;

        this.mediaUpdateTimeout = window$1.setTimeout(function () {
          return _this4.load();
        }, delay);
        return;
      }

      if (!this.started) {
        this.start();
        return;
      }

      if (media && !media.endList) {
        // live playlist: kick off an immediate refresh
        this.trigger('mediaupdatetimeout');
      } else {
        this.trigger('loadedplaylist');
      }
    }

    /**
     * start loading of the playlist
     */

  }, {
    key: 'start',
    value: function start() {
      var _this5 = this;

      this.started = true;

      // request the specified URL
      this.request = this.hls_.xhr({
        uri: this.srcUrl,
        withCredentials: this.withCredentials
      }, function (error, req) {
        // disposed
        if (!_this5.request) {
          return;
        }

        // clear the loader's request reference
        _this5.request = null;

        if (error) {
          _this5.error = {
            status: req.status,
            message: 'HLS playlist request error at URL: ' + _this5.srcUrl + '.',
            responseText: req.responseText,
            // MEDIA_ERR_NETWORK
            code: 2
          };
          if (_this5.state === 'HAVE_NOTHING') {
            _this5.started = false;
          }
          return _this5.trigger('error');
        }

        var parser = new Parser();

        // adding custom tag parsers
        _this5.customTagParsers.forEach(function (customParser) {
          return parser.addParser(customParser);
        });

        // adding custom tag mappers
        _this5.customTagMappers.forEach(function (mapper) {
          return parser.addTagMapper(mapper);
        });

        parser.push(req.responseText);
        parser.end();

        _this5.state = 'HAVE_MASTER';

        // follow a redirect, if the option is enabled
        _this5.srcUrl = resolveManifestRedirect(_this5.handleManifestRedirects, _this5.srcUrl, req);

        parser.manifest.uri = _this5.srcUrl;

        // loaded a master playlist
        if (parser.manifest.playlists) {
          _this5.master = parser.manifest;

          setupMediaPlaylists(_this5.master);
          resolveMediaGroupUris(_this5.master);

          _this5.trigger('loadedplaylist');
          if (!_this5.request) {
            // no media playlist was specifically selected so start
            // from the first listed one
            _this5.media(parser.manifest.playlists[0]);
          }
          return;
        }

        var id = createPlaylistID(0, _this5.srcUrl);

        // loaded a media playlist
        // infer a master playlist if none was previously requested
        _this5.master = {
          mediaGroups: {
            'AUDIO': {},
            'VIDEO': {},
            'CLOSED-CAPTIONS': {},
            'SUBTITLES': {}
          },
          uri: window$1.location.href,
          playlists: [{
            uri: _this5.srcUrl,
            id: id,
            resolvedUri: _this5.srcUrl,
            // m3u8-parser does not attach an attributes property to media playlists so make
            // sure that the property is attached to avoid undefined reference errors
            attributes: {}
          }]
        };
        _this5.master.playlists[id] = _this5.master.playlists[0];
        // URI reference added for backwards compatibility
        _this5.master.playlists[_this5.srcUrl] = _this5.master.playlists[0];

        _this5.haveMetadata(req, _this5.srcUrl, id);
        return _this5.trigger('loadedmetadata');
      });
    }
  }]);
  return PlaylistLoader;
}(EventTarget);
824
825/**
826 * @file playlist.js
827 *
828 * Playlist related utilities.
829 */
830
// alias of videojs.createTimeRange for use by the playlist utilities below
var createTimeRange = videojs.createTimeRange;
832
/**
 * walk backward until we find a duration we can use
 * or return a failure
 *
 * @param {Playlist} playlist the playlist to walk through
 * @param {Number} endSequence the mediaSequence to stop walking on
 * @return {{result: Number, precise: Boolean}} the accumulated duration and
 * whether it was derived from explicit segment timeline information
 */
var backwardDuration = function backwardDuration(playlist, endSequence) {
  var i = endSequence - playlist.mediaSequence;
  // if a start time is available for the segment immediately following
  // the interval, use it
  var boundary = playlist.segments[i];
  var accumulated = 0;

  if (boundary) {
    if (typeof boundary.start !== 'undefined') {
      return { result: boundary.start, precise: true };
    }

    if (typeof boundary.end !== 'undefined') {
      return {
        result: boundary.end - boundary.duration,
        precise: true
      };
    }
  }

  // Walk backward until we find the latest segment with timeline
  // information that is earlier than endSequence
  while (i--) {
    var segment = playlist.segments[i];

    if (typeof segment.end !== 'undefined') {
      return { result: accumulated + segment.end, precise: true };
    }

    accumulated += segment.duration;

    if (typeof segment.start !== 'undefined') {
      return { result: accumulated + segment.start, precise: true };
    }
  }

  return { result: accumulated, precise: false };
};
875
/**
 * walk forward until we find a duration we can use
 * or return a failure
 *
 * @param {Playlist} playlist the playlist to walk through
 * @param {Number} endSequence the mediaSequence to stop walking on
 * @return {{result: Number, precise: Boolean}} the estimated duration and
 * whether it came from explicit segment timeline information
 */
var forwardDuration = function forwardDuration(playlist, endSequence) {
  var accumulated = 0;
  var first = endSequence - playlist.mediaSequence;

  // Walk forward until we find the earliest segment with timeline
  // information
  for (var i = first; i < playlist.segments.length; i++) {
    var segment = playlist.segments[i];

    if (typeof segment.start !== 'undefined') {
      return {
        result: segment.start - accumulated,
        precise: true
      };
    }

    accumulated += segment.duration;

    if (typeof segment.end !== 'undefined') {
      return {
        result: segment.end - accumulated,
        precise: true
      };
    }
  }

  // indicate we didn't find a useful duration estimate
  return { result: -1, precise: false };
};
911
/**
 * Calculate the media duration from the segments associated with a
 * playlist. The duration of a subinterval of the available segments
 * may be calculated by specifying an end index.
 *
 * @param {Object} playlist a media playlist object
 * @param {Number=} endSequence an exclusive upper boundary
 * for the playlist. Defaults to playlist length.
 * @param {Number} expired the amount of time that has dropped
 * off the front of the playlist in a live scenario
 * @return {Number} the duration between the first available segment
 * and end index.
 */
var intervalDuration = function intervalDuration(playlist, endSequence, expired) {
  if (typeof endSequence === 'undefined') {
    endSequence = playlist.mediaSequence + playlist.segments.length;
  }

  if (endSequence < playlist.mediaSequence) {
    return 0;
  }

  // prefer a precise estimate derived from timing information supplied
  // directly by the Media Source, found by walking backward...
  var backward = backwardDuration(playlist, endSequence);

  if (backward.precise) {
    return backward.result;
  }

  // ...then try walking forward for a segment whose position is known
  // precisely...
  var forward = forwardDuration(playlist, endSequence);

  if (forward.precise) {
    return forward.result;
  }

  // ...and fall back to the less-precise, playlist-based estimate
  return backward.result + expired;
};
958
/**
 * Calculates the duration of a playlist. If a start and end index
 * are specified, the duration will be for the subset of the media
 * timeline between those two indices. The total duration for live
 * playlists is always Infinity.
 *
 * @param {Object} playlist a media playlist object
 * @param {Number=} endSequence an exclusive upper
 * boundary for the playlist. Defaults to the playlist media
 * sequence number plus its length.
 * @param {Number=} expired the amount of time that has
 * dropped off the front of the playlist in a live scenario
 * @return {Number} the duration between the start index and end
 * index.
 */
var duration = function duration(playlist, endSequence, expired) {
  if (!playlist) {
    return 0;
  }

  if (typeof expired !== 'number') {
    expired = 0;
  }

  // playlist-level duration indicators only apply when the full
  // duration is requested
  if (typeof endSequence === 'undefined') {
    // if present, use the duration specified in the playlist
    if (playlist.totalDuration) {
      return playlist.totalDuration;
    }

    // duration should be Infinity for live playlists
    if (!playlist.endList) {
      return window$1.Infinity;
    }
  }

  // calculate the total duration based on the segment durations
  return intervalDuration(playlist, endSequence, expired);
};
1000
/**
 * Calculate the time between two indexes in the current playlist.
 * Neither the start- nor the end-index need to be within the current
 * playlist, in which case the targetDuration of the playlist is used
 * to approximate the durations of the segments.
 *
 * @param {Object} playlist a media playlist object
 * @param {Number} startIndex
 * @param {Number} endIndex
 * @return {Number} the number of seconds between startIndex and endIndex
 */
var sumDurations = function sumDurations(playlist, startIndex, endIndex) {
  var total = 0;

  // normalize so startIndex <= endIndex
  if (startIndex > endIndex) {
    var swap = startIndex;

    startIndex = endIndex;
    endIndex = swap;
  }

  // indices before the start of the playlist are approximated with the
  // target duration
  if (startIndex < 0) {
    for (var i = startIndex; i < Math.min(0, endIndex); i++) {
      total += playlist.targetDuration;
    }
    startIndex = 0;
  }

  for (var j = startIndex; j < endIndex; j++) {
    total += playlist.segments[j].duration;
  }

  return total;
};
1034
/**
 * Determines the media index of the segment corresponding to the safe edge of
 * the live window, which is the duration of the last segment plus 2 target
 * durations from the end of the playlist.
 *
 * A liveEdgePadding can be provided, which will be used instead of calculating
 * the safe live edge. This corresponds to suggestedPresentationDelay in DASH
 * manifests.
 *
 * @param {Object} playlist
 *        a media playlist object
 * @param {Number} [liveEdgePadding]
 *        A number in seconds indicating how far from the end we want to be.
 *        If provided, this value is used instead of calculating the safe live
 *        index from the target durations.
 * @return {Number}
 *        The media index of the segment at the safe live point. 0 if there is
 *        no "safe" point.
 * @function safeLiveIndex
 */
var safeLiveIndex = function safeLiveIndex(playlist, liveEdgePadding) {
  var segments = playlist.segments;

  if (!segments.length) {
    return 0;
  }

  var i = segments.length;
  var lastSegmentDuration = segments[i - 1].duration || playlist.targetDuration;
  // default safe distance: last segment duration plus two target durations
  var safeDistance = typeof liveEdgePadding === 'number' ? liveEdgePadding : lastSegmentDuration + playlist.targetDuration * 2;

  // no padding requested: the live edge itself is safe
  if (safeDistance === 0) {
    return i;
  }

  var distanceFromEnd = 0;

  while (i--) {
    distanceFromEnd += segments[i].duration;

    if (distanceFromEnd >= safeDistance) {
      break;
    }
  }

  return Math.max(0, i);
};
1079
1080/**
1081 * Calculates the playlist end time
1082 *
1083 * @param {Object} playlist a media playlist object
1084 * @param {Number=} expired the amount of time that has
1085 * dropped off the front of the playlist in a live scenario
1086 * @param {Boolean|false} useSafeLiveEnd a boolean value indicating whether or not the
1087 * playlist end calculation should consider the safe live end
1088 * (truncate the playlist end by three segments). This is normally
1089 * used for calculating the end of the playlist's seekable range.
1090 * This takes into account the value of liveEdgePadding.
1091 * Setting liveEdgePadding to 0 is equivalent to setting this to false.
1092 * @param {Number} liveEdgePadding a number indicating how far from the end of the playlist we should be in seconds.
1093 * If this is provided, it is used in the safe live end calculation.
1094 * Setting useSafeLiveEnd=false or liveEdgePadding=0 are equivalent.
1095 * Corresponds to suggestedPresentationDelay in DASH manifests.
1096 * @returns {Number} the end time of playlist
1097 * @function playlistEnd
1098 */
1099var playlistEnd = function playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding) {
1100 if (!playlist || !playlist.segments) {
1101 return null;
1102 }
1103 if (playlist.endList) {
1104 return duration(playlist);
1105 }
1106
1107 if (expired === null) {
1108 return null;
1109 }
1110
1111 expired = expired || 0;
1112
1113 var endSequence = useSafeLiveEnd ? safeLiveIndex(playlist, liveEdgePadding) : playlist.segments.length;
1114
1115 return intervalDuration(playlist, playlist.mediaSequence + endSequence, expired);
1116};
1117
1118/**
1119 * Calculates the interval of time that is currently seekable in a
1120 * playlist. The returned time ranges are relative to the earliest
1121 * moment in the specified playlist that is still available. A full
1122 * seekable implementation for live streams would need to offset
1123 * these values by the duration of content that has expired from the
1124 * stream.
1125 *
1126 * @param {Object} playlist a media playlist object
1127 * dropped off the front of the playlist in a live scenario
1128 * @param {Number=} expired the amount of time that has
1129 * dropped off the front of the playlist in a live scenario
1130 * @param {Number} liveEdgePadding how far from the end of the playlist we should be in seconds.
1131 * Corresponds to suggestedPresentationDelay in DASH manifests.
1132 * @return {TimeRanges} the periods of time that are valid targets
1133 * for seeking
1134 */
1135var seekable = function seekable(playlist, expired, liveEdgePadding) {
1136 var useSafeLiveEnd = true;
1137 var seekableStart = expired || 0;
1138 var seekableEnd = playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding);
1139
1140 if (seekableEnd === null) {
1141 return createTimeRange();
1142 }
1143 return createTimeRange(seekableStart, seekableEnd);
1144};
1145
// returns true when `num` has no fractional component
var isWholeNumber = function isWholeNumber(num) {
  return num % 1 === 0;
};

/**
 * Nudges the least significant decimal digit of `num` by `increment`
 * (+1 rounds up, -1 rounds down); whole numbers are shifted by a tenth.
 *
 * @param {Number} increment +1 or -1
 * @param {Number} num the number to adjust
 * @return {Number} the adjusted number
 */
var roundSignificantDigit = function roundSignificantDigit(increment, num) {
  // a whole number has no decimal digits, so just shift it by a tenth
  if (isWholeNumber(num)) {
    return num + increment * 0.1;
  }

  var decimalDigits = num.toString().split('.')[1].length;

  // scale up one decimal place at a time until the scaled value is whole
  // (or we run out of decimal digits), then adjust and scale back down
  for (var digit = 1; digit <= decimalDigits; digit++) {
    var multiplier = Math.pow(10, digit);
    var scaled = num * multiplier;

    if (isWholeNumber(scaled) || digit === decimalDigits) {
      return (scaled + increment) / multiplier;
    }
  }
};

var ceilLeastSignificantDigit = roundSignificantDigit.bind(null, 1);
var floorLeastSignificantDigit = roundSignificantDigit.bind(null, -1);
1170
1171/**
1172 * Determine the index and estimated starting time of the segment that
1173 * contains a specified playback position in a media playlist.
1174 *
1175 * @param {Object} playlist the media playlist to query
1176 * @param {Number} currentTime The number of seconds since the earliest
1177 * possible position to determine the containing segment for
1178 * @param {Number} startIndex
1179 * @param {Number} startTime
1180 * @return {Object}
1181 */
1182var getMediaInfoForTime = function getMediaInfoForTime(playlist, currentTime, startIndex, startTime) {
1183 var i = void 0;
1184 var segment = void 0;
1185 var numSegments = playlist.segments.length;
1186
1187 var time = currentTime - startTime;
1188
1189 if (time < 0) {
1190 // Walk backward from startIndex in the playlist, adding durations
1191 // until we find a segment that contains `time` and return it
1192 if (startIndex > 0) {
1193 for (i = startIndex - 1; i >= 0; i--) {
1194 segment = playlist.segments[i];
1195 time += floorLeastSignificantDigit(segment.duration);
1196 if (time > 0) {
1197 return {
1198 mediaIndex: i,
1199 startTime: startTime - sumDurations(playlist, startIndex, i)
1200 };
1201 }
1202 }
1203 }
1204 // We were unable to find a good segment within the playlist
1205 // so select the first segment
1206 return {
1207 mediaIndex: 0,
1208 startTime: currentTime
1209 };
1210 }
1211
1212 // When startIndex is negative, we first walk forward to first segment
1213 // adding target durations. If we "run out of time" before getting to
1214 // the first segment, return the first segment
1215 if (startIndex < 0) {
1216 for (i = startIndex; i < 0; i++) {
1217 time -= playlist.targetDuration;
1218 if (time < 0) {
1219 return {
1220 mediaIndex: 0,
1221 startTime: currentTime
1222 };
1223 }
1224 }
1225 startIndex = 0;
1226 }
1227
1228 // Walk forward from startIndex in the playlist, subtracting durations
1229 // until we find a segment that contains `time` and return it
1230 for (i = startIndex; i < numSegments; i++) {
1231 segment = playlist.segments[i];
1232 time -= ceilLeastSignificantDigit(segment.duration);
1233 if (time < 0) {
1234 return {
1235 mediaIndex: i,
1236 startTime: startTime + sumDurations(playlist, startIndex, i)
1237 };
1238 }
1239 }
1240
1241 // We are out of possible candidates so load the last one...
1242 return {
1243 mediaIndex: numSegments - 1,
1244 startTime: currentTime
1245 };
1246};
1247
1248/**
1249 * Check whether the playlist is blacklisted or not.
1250 *
1251 * @param {Object} playlist the media playlist object
1252 * @return {boolean} whether the playlist is blacklisted or not
1253 * @function isBlacklisted
1254 */
1255var isBlacklisted = function isBlacklisted(playlist) {
1256 return playlist.excludeUntil && playlist.excludeUntil > Date.now();
1257};
1258
1259/**
1260 * Check whether the playlist is compatible with current playback configuration or has
1261 * been blacklisted permanently for being incompatible.
1262 *
1263 * @param {Object} playlist the media playlist object
1264 * @return {boolean} whether the playlist is incompatible or not
1265 * @function isIncompatible
1266 */
1267var isIncompatible = function isIncompatible(playlist) {
1268 return playlist.excludeUntil && playlist.excludeUntil === Infinity;
1269};
1270
1271/**
1272 * Check whether the playlist is enabled or not.
1273 *
1274 * @param {Object} playlist the media playlist object
1275 * @return {boolean} whether the playlist is enabled or not
1276 * @function isEnabled
1277 */
1278var isEnabled = function isEnabled(playlist) {
1279 var blacklisted = isBlacklisted(playlist);
1280
1281 return !playlist.disabled && !blacklisted;
1282};
1283
1284/**
1285 * Check whether the playlist has been manually disabled through the representations api.
1286 *
1287 * @param {Object} playlist the media playlist object
1288 * @return {boolean} whether the playlist is disabled manually or not
1289 * @function isDisabled
1290 */
1291var isDisabled = function isDisabled(playlist) {
1292 return playlist.disabled;
1293};
1294
1295/**
1296 * Returns whether the current playlist is an AES encrypted HLS stream
1297 *
1298 * @return {Boolean} true if it's an AES encrypted HLS stream
1299 */
1300var isAes = function isAes(media) {
1301 for (var i = 0; i < media.segments.length; i++) {
1302 if (media.segments[i].key) {
1303 return true;
1304 }
1305 }
1306 return false;
1307};
1308
1309/**
1310 * Returns whether the current playlist contains fMP4
1311 *
1312 * @return {Boolean} true if the playlist contains fMP4
1313 */
1314var isFmp4 = function isFmp4(media) {
1315 for (var i = 0; i < media.segments.length; i++) {
1316 if (media.segments[i].map) {
1317 return true;
1318 }
1319 }
1320 return false;
1321};
1322
1323/**
1324 * Checks if the playlist has a value for the specified attribute
1325 *
1326 * @param {String} attr
1327 * Attribute to check for
1328 * @param {Object} playlist
1329 * The media playlist object
1330 * @return {Boolean}
1331 * Whether the playlist contains a value for the attribute or not
1332 * @function hasAttribute
1333 */
1334var hasAttribute = function hasAttribute(attr, playlist) {
1335 return playlist.attributes && playlist.attributes[attr];
1336};
1337
1338/**
1339 * Estimates the time required to complete a segment download from the specified playlist
1340 *
1341 * @param {Number} segmentDuration
1342 * Duration of requested segment
1343 * @param {Number} bandwidth
1344 * Current measured bandwidth of the player
1345 * @param {Object} playlist
1346 * The media playlist object
1347 * @param {Number=} bytesReceived
1348 * Number of bytes already received for the request. Defaults to 0
1349 * @return {Number|NaN}
1350 * The estimated time to request the segment. NaN if bandwidth information for
1351 * the given playlist is unavailable
1352 * @function estimateSegmentRequestTime
1353 */
1354var estimateSegmentRequestTime = function estimateSegmentRequestTime(segmentDuration, bandwidth, playlist) {
1355 var bytesReceived = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : 0;
1356
1357 if (!hasAttribute('BANDWIDTH', playlist)) {
1358 return NaN;
1359 }
1360
1361 var size = segmentDuration * playlist.attributes.BANDWIDTH;
1362
1363 return (size - bytesReceived * 8) / bandwidth;
1364};
1365
1366/*
1367 * Returns whether the current playlist is the lowest rendition
1368 *
1369 * @return {Boolean} true if on lowest rendition
1370 */
1371var isLowestEnabledRendition = function isLowestEnabledRendition(master, media) {
1372 if (master.playlists.length === 1) {
1373 return true;
1374 }
1375
1376 var currentBandwidth = media.attributes.BANDWIDTH || Number.MAX_VALUE;
1377
1378 return master.playlists.filter(function (playlist) {
1379 if (!isEnabled(playlist)) {
1380 return false;
1381 }
1382
1383 return (playlist.attributes.BANDWIDTH || 0) < currentBandwidth;
1384 }).length === 0;
1385};
1386
// exports
var Playlist = {
  duration,
  seekable,
  safeLiveIndex,
  getMediaInfoForTime,
  isEnabled,
  isDisabled,
  isBlacklisted,
  isIncompatible,
  playlistEnd,
  isAes,
  isFmp4,
  hasAttribute,
  estimateSegmentRequestTime,
  isLowestEnabledRendition
};
1404
1405/**
1406 * @file xhr.js
1407 */
1408
1409var videojsXHR = videojs.xhr,
1410 mergeOptions$1 = videojs.mergeOptions;
1411
1412
/**
 * Creates the xhr wrapper function used for all requests made by this plugin.
 *
 * The returned function behaves like videojs.xhr but additionally:
 * - applies a default 45 second timeout to every request
 * - runs an optional `beforeRequest` hook (set per-instance on the returned
 *   function, or globally on videojs.Hls.xhr) that may replace the options
 * - records timing/bandwidth statistics on the request object
 * - normalizes timeout and non-2xx status handling to the legacy XHR behavior
 *
 * @return {Function} the wrapped xhr function
 */
var xhrFactory = function xhrFactory() {
  var xhr = function XhrFunction(options, callback) {
    // Add a default timeout for all hls requests
    options = mergeOptions$1({
      timeout: 45e3
    }, options);

    // Allow an optional user-specified function to modify the option
    // object before we construct the xhr request
    var beforeRequest = XhrFunction.beforeRequest || videojs.Hls.xhr.beforeRequest;

    if (beforeRequest && typeof beforeRequest === 'function') {
      var newOptions = beforeRequest(options);

      // only swap in the hook's result when it returned something
      if (newOptions) {
        options = newOptions;
      }
    }

    var request = videojsXHR(options, function (error, response) {
      var reqResponse = request.response;

      // on success, record timing stats and derive bandwidth in bits/sec
      // from bytes received over the round-trip time (ms)
      if (!error && reqResponse) {
        request.responseTime = Date.now();
        request.roundTripTime = request.responseTime - request.requestTime;
        request.bytesReceived = reqResponse.byteLength || reqResponse.length;
        if (!request.bandwidth) {
          request.bandwidth = Math.floor(request.bytesReceived / request.roundTripTime * 8 * 1000);
        }
      }

      if (response.headers) {
        request.responseHeaders = response.headers;
      }

      // videojs.xhr now uses a specific code on the error
      // object to signal that a request has timed out instead
      // of setting a boolean on the request object
      if (error && error.code === 'ETIMEDOUT') {
        request.timedout = true;
      }

      // videojs.xhr no longer considers status codes outside of 200 and 0
      // (for file uris) to be errors, but the old XHR did, so emulate that
      // behavior. Status 206 may be used in response to byterange requests.
      if (!error && !request.aborted && response.statusCode !== 200 && response.statusCode !== 206 && response.statusCode !== 0) {
        error = new Error('XHR Failed with a response of: ' + (request && (reqResponse || request.responseText)));
      }

      callback(error, request);
    });
    var originalAbort = request.abort;

    // flag aborted requests so the response handler above can skip
    // treating them as status-code errors
    request.abort = function () {
      request.aborted = true;
      return originalAbort.apply(request, arguments);
    };
    request.uri = options.uri;
    request.requestTime = Date.now();
    return request;
  };

  return xhr;
};
1477
1478/**
1479 * Turns segment byterange into a string suitable for use in
1480 * HTTP Range requests
1481 *
1482 * @param {Object} byterange - an object with two values defining the start and end
1483 * of a byte-range
1484 */
1485var byterangeStr = function byterangeStr(byterange) {
1486 var byterangeStart = void 0;
1487 var byterangeEnd = void 0;
1488
1489 // `byterangeEnd` is one less than `offset + length` because the HTTP range
1490 // header uses inclusive ranges
1491 byterangeEnd = byterange.offset + byterange.length - 1;
1492 byterangeStart = byterange.offset;
1493 return 'bytes=' + byterangeStart + '-' + byterangeEnd;
1494};
1495
1496/**
1497 * Defines headers for use in the xhr request for a particular segment.
1498 *
1499 * @param {Object} segment - a simplified copy of the segmentInfo object
1500 * from SegmentLoader
1501 */
1502var segmentXhrHeaders = function segmentXhrHeaders(segment) {
1503 var headers = {};
1504
1505 if (segment.byterange) {
1506 headers.Range = byterangeStr(segment.byterange);
1507 }
1508 return headers;
1509};
1510
1511/**
1512 * @file bin-utils.js
1513 */
1514
1515/**
1516 * convert a TimeRange to text
1517 *
1518 * @param {TimeRange} range the timerange to use for conversion
1519 * @param {Number} i the iterator on the range to convert
1520 */
1521var textRange = function textRange(range, i) {
1522 return range.start(i) + '-' + range.end(i);
1523};
1524
1525/**
1526 * format a number as hex string
1527 *
1528 * @param {Number} e The number
1529 * @param {Number} i the iterator
1530 */
1531var formatHexString = function formatHexString(e, i) {
1532 var value = e.toString(16);
1533
1534 return '00'.substring(0, 2 - value.length) + value + (i % 2 ? ' ' : '');
1535};
// render a byte as its printable ASCII character, or '.' when unprintable
var formatAsciiString = function formatAsciiString(e) {
  return e >= 0x20 && e < 0x7e ? String.fromCharCode(e) : '.';
};
1542
1543/**
1544 * Creates an object for sending to a web worker modifying properties that are TypedArrays
1545 * into a new object with seperated properties for the buffer, byteOffset, and byteLength.
1546 *
1547 * @param {Object} message
1548 * Object of properties and values to send to the web worker
1549 * @return {Object}
1550 * Modified message with TypedArray values expanded
1551 * @function createTransferableMessage
1552 */
1553var createTransferableMessage = function createTransferableMessage(message) {
1554 var transferable = {};
1555
1556 Object.keys(message).forEach(function (key) {
1557 var value = message[key];
1558
1559 if (ArrayBuffer.isView(value)) {
1560 transferable[key] = {
1561 bytes: value.buffer,
1562 byteOffset: value.byteOffset,
1563 byteLength: value.byteLength
1564 };
1565 } else {
1566 transferable[key] = value;
1567 }
1568 });
1569
1570 return transferable;
1571};
1572
1573/**
1574 * Returns a unique string identifier for a media initialization
1575 * segment.
1576 */
1577var initSegmentId = function initSegmentId(initSegment) {
1578 var byterange = initSegment.byterange || {
1579 length: Infinity,
1580 offset: 0
1581 };
1582
1583 return [byterange.length, byterange.offset, initSegment.resolvedUri].join(',');
1584};
1585
1586/**
1587 * Returns a unique string identifier for a media segment key.
1588 */
1589var segmentKeyId = function segmentKeyId(key) {
1590 return key.resolvedUri;
1591};
1592
1593/**
1594 * utils to help dump binary data to the console
1595 */
1596var hexDump = function hexDump(data) {
1597 var bytes = Array.prototype.slice.call(data);
1598 var step = 16;
1599 var result = '';
1600 var hex = void 0;
1601 var ascii = void 0;
1602
1603 for (var j = 0; j < bytes.length / step; j++) {
1604 hex = bytes.slice(j * step, j * step + step).map(formatHexString).join('');
1605 ascii = bytes.slice(j * step, j * step + step).map(formatAsciiString).join('');
1606 result += hex + ' ' + ascii + '\n';
1607 }
1608
1609 return result;
1610};
1611
// hex-dump the byte payload of an FLV tag
var tagDump = function tagDump(_ref) {
  return hexDump(_ref.bytes);
};
1616
// render every interval in a TimeRanges object, space separated
var textRanges = function textRanges(ranges) {
  var result = '';

  for (var index = 0; index < ranges.length; index++) {
    result += textRange(ranges, index) + ' ';
  }

  return result;
};
1626
var utils = /*#__PURE__*/Object.freeze({
  createTransferableMessage,
  initSegmentId,
  segmentKeyId,
  hexDump,
  tagDump,
  textRanges
});
1635
// TODO handle fmp4 case where the timing info is accurate and doesn't involve transmux

// Add 25% to the segment duration to account for small discrepancies in segment timing.
// 25% was arbitrarily chosen, and may need to be refined over time.
var SEGMENT_END_FUDGE_PERCENT = 0.25;
1641
1642/**
1643 * Converts a player time (any time that can be gotten/set from player.currentTime(),
1644 * e.g., any time within player.seekable().start(0) to player.seekable().end(0)) to a
1645 * program time (any time referencing the real world (e.g., EXT-X-PROGRAM-DATE-TIME)).
1646 *
1647 * The containing segment is required as the EXT-X-PROGRAM-DATE-TIME serves as an "anchor
1648 * point" (a point where we have a mapping from program time to player time, with player
1649 * time being the post transmux start of the segment).
1650 *
1651 * For more details, see [this doc](../../docs/program-time-from-player-time.md).
1652 *
1653 * @param {Number} playerTime the player time
1654 * @param {Object} segment the segment which contains the player time
1655 * @return {Date} program time
1656 */
1657var playerTimeToProgramTime = function playerTimeToProgramTime(playerTime, segment) {
1658 if (!segment.dateTimeObject) {
1659 // Can't convert without an "anchor point" for the program time (i.e., a time that can
1660 // be used to map the start of a segment with a real world time).
1661 return null;
1662 }
1663
1664 var transmuxerPrependedSeconds = segment.videoTimingInfo.transmuxerPrependedSeconds;
1665 var transmuxedStart = segment.videoTimingInfo.transmuxedPresentationStart;
1666
1667 // get the start of the content from before old content is prepended
1668 var startOfSegment = transmuxedStart + transmuxerPrependedSeconds;
1669 var offsetFromSegmentStart = playerTime - startOfSegment;
1670
1671 return new Date(segment.dateTimeObject.getTime() + offsetFromSegmentStart * 1000);
1672};
1673
// duration of only the segment's own content: the transmuxed presentation
// window minus whatever the transmuxer prepended from the prior segment
var originalSegmentVideoDuration = function originalSegmentVideoDuration(videoTimingInfo) {
  var windowDuration = videoTimingInfo.transmuxedPresentationEnd - videoTimingInfo.transmuxedPresentationStart;

  return windowDuration - videoTimingInfo.transmuxerPrependedSeconds;
};
1677
1678/**
1679 * Finds a segment that contains the time requested given as an ISO-8601 string. The
1680 * returned segment might be an estimate or an accurate match.
1681 *
1682 * @param {String} programTime The ISO-8601 programTime to find a match for
1683 * @param {Object} playlist A playlist object to search within
1684 */
1685var findSegmentForProgramTime = function findSegmentForProgramTime(programTime, playlist) {
1686 // Assumptions:
1687 // - verifyProgramDateTimeTags has already been run
1688 // - live streams have been started
1689
1690 var dateTimeObject = void 0;
1691
1692 try {
1693 dateTimeObject = new Date(programTime);
1694 } catch (e) {
1695 return null;
1696 }
1697
1698 if (!playlist || !playlist.segments || playlist.segments.length === 0) {
1699 return null;
1700 }
1701
1702 var segment = playlist.segments[0];
1703
1704 if (dateTimeObject < segment.dateTimeObject) {
1705 // Requested time is before stream start.
1706 return null;
1707 }
1708
1709 for (var i = 0; i < playlist.segments.length - 1; i++) {
1710 segment = playlist.segments[i];
1711
1712 var nextSegmentStart = playlist.segments[i + 1].dateTimeObject;
1713
1714 if (dateTimeObject < nextSegmentStart) {
1715 break;
1716 }
1717 }
1718
1719 var lastSegment = playlist.segments[playlist.segments.length - 1];
1720 var lastSegmentStart = lastSegment.dateTimeObject;
1721 var lastSegmentDuration = lastSegment.videoTimingInfo ? originalSegmentVideoDuration(lastSegment.videoTimingInfo) : lastSegment.duration + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT;
1722 var lastSegmentEnd = new Date(lastSegmentStart.getTime() + lastSegmentDuration * 1000);
1723
1724 if (dateTimeObject > lastSegmentEnd) {
1725 // Beyond the end of the stream, or our best guess of the end of the stream.
1726 return null;
1727 }
1728
1729 if (dateTimeObject > lastSegmentStart) {
1730 segment = lastSegment;
1731 }
1732
1733 return {
1734 segment: segment,
1735 estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : Playlist.duration(playlist, playlist.mediaSequence + playlist.segments.indexOf(segment)),
1736 // Although, given that all segments have accurate date time objects, the segment
1737 // selected should be accurate, unless the video has been transmuxed at some point
1738 // (determined by the presence of the videoTimingInfo object), the segment's "player
1739 // time" (the start time in the player) can't be considered accurate.
1740 type: segment.videoTimingInfo ? 'accurate' : 'estimate'
1741 };
1742};
1743
1744/**
1745 * Finds a segment that contains the given player time(in seconds).
1746 *
1747 * @param {Number} time The player time to find a match for
1748 * @param {Object} playlist A playlist object to search within
1749 */
1750var findSegmentForPlayerTime = function findSegmentForPlayerTime(time, playlist) {
1751 // Assumptions:
1752 // - there will always be a segment.duration
1753 // - we can start from zero
1754 // - segments are in time order
1755
1756 if (!playlist || !playlist.segments || playlist.segments.length === 0) {
1757 return null;
1758 }
1759
1760 var segmentEnd = 0;
1761 var segment = void 0;
1762
1763 for (var i = 0; i < playlist.segments.length; i++) {
1764 segment = playlist.segments[i];
1765
1766 // videoTimingInfo is set after the segment is downloaded and transmuxed, and
1767 // should contain the most accurate values we have for the segment's player times.
1768 //
1769 // Use the accurate transmuxedPresentationEnd value if it is available, otherwise fall
1770 // back to an estimate based on the manifest derived (inaccurate) segment.duration, to
1771 // calculate an end value.
1772 segmentEnd = segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationEnd : segmentEnd + segment.duration;
1773
1774 if (time <= segmentEnd) {
1775 break;
1776 }
1777 }
1778
1779 var lastSegment = playlist.segments[playlist.segments.length - 1];
1780
1781 if (lastSegment.videoTimingInfo && lastSegment.videoTimingInfo.transmuxedPresentationEnd < time) {
1782 // The time requested is beyond the stream end.
1783 return null;
1784 }
1785
1786 if (time > segmentEnd) {
1787 // The time is within or beyond the last segment.
1788 //
1789 // Check to see if the time is beyond a reasonable guess of the end of the stream.
1790 if (time > segmentEnd + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT) {
1791 // Technically, because the duration value is only an estimate, the time may still
1792 // exist in the last segment, however, there isn't enough information to make even
1793 // a reasonable estimate.
1794 return null;
1795 }
1796
1797 segment = lastSegment;
1798 }
1799
1800 return {
1801 segment: segment,
1802 estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : segmentEnd - segment.duration,
1803 // Because videoTimingInfo is only set after transmux, it is the only way to get
1804 // accurate timing values.
1805 type: segment.videoTimingInfo ? 'accurate' : 'estimate'
1806 };
1807};
1808
1809/**
1810 * Gives the offset of the comparisonTimestamp from the programTime timestamp in seconds.
1811 * If the offset returned is positive, the programTime occurs after the
1812 * comparisonTimestamp.
1813 * If the offset is negative, the programTime occurs before the comparisonTimestamp.
1814 *
1815 * @param {String} comparisonTimeStamp An ISO-8601 timestamp to compare against
1816 * @param {String} programTime The programTime as an ISO-8601 string
1817 * @return {Number} offset
1818 */
1819var getOffsetFromTimestamp = function getOffsetFromTimestamp(comparisonTimeStamp, programTime) {
1820 var segmentDateTime = void 0;
1821 var programDateTime = void 0;
1822
1823 try {
1824 segmentDateTime = new Date(comparisonTimeStamp);
1825 programDateTime = new Date(programTime);
1826 } catch (e) {
1827 // TODO handle error
1828 }
1829
1830 var segmentTimeEpoch = segmentDateTime.getTime();
1831 var programTimeEpoch = programDateTime.getTime();
1832
1833 return (programTimeEpoch - segmentTimeEpoch) / 1000;
1834};
1835
1836/**
1837 * Checks that all segments in this playlist have programDateTime tags.
1838 *
1839 * @param {Object} playlist A playlist object
1840 */
1841var verifyProgramDateTimeTags = function verifyProgramDateTimeTags(playlist) {
1842 if (!playlist.segments || playlist.segments.length === 0) {
1843 return false;
1844 }
1845
1846 for (var i = 0; i < playlist.segments.length; i++) {
1847 var segment = playlist.segments[i];
1848
1849 if (!segment.dateTimeObject) {
1850 return false;
1851 }
1852 }
1853
1854 return true;
1855};
1856
1857/**
1858 * Returns the programTime of the media given a playlist and a playerTime.
1859 * The playlist must have programDateTime tags for a programDateTime tag to be returned.
1860 * If the segments containing the time requested have not been buffered yet, an estimate
1861 * may be returned to the callback.
1862 *
1863 * @param {Object} args
1864 * @param {Object} args.playlist A playlist object to search within
1865 * @param {Number} time A playerTime in seconds
1866 * @param {Function} callback(err, programTime)
1867 * @returns {String} err.message A detailed error message
1868 * @returns {Object} programTime
1869 * @returns {Number} programTime.mediaSeconds The streamTime in seconds
1870 * @returns {String} programTime.programDateTime The programTime as an ISO-8601 String
1871 */
1872var getProgramTime = function getProgramTime(_ref) {
1873 var playlist = _ref.playlist,
1874 _ref$time = _ref.time,
1875 time = _ref$time === undefined ? undefined : _ref$time,
1876 callback = _ref.callback;
1877
1878
1879 if (!callback) {
1880 throw new Error('getProgramTime: callback must be provided');
1881 }
1882
1883 if (!playlist || time === undefined) {
1884 return callback({
1885 message: 'getProgramTime: playlist and time must be provided'
1886 });
1887 }
1888
1889 var matchedSegment = findSegmentForPlayerTime(time, playlist);
1890
1891 if (!matchedSegment) {
1892 return callback({
1893 message: 'valid programTime was not found'
1894 });
1895 }
1896
1897 if (matchedSegment.type === 'estimate') {
1898 return callback({
1899 message: 'Accurate programTime could not be determined.' + ' Please seek to e.seekTime and try again',
1900 seekTime: matchedSegment.estimatedStart
1901 });
1902 }
1903
1904 var programTimeObject = {
1905 mediaSeconds: time
1906 };
1907 var programTime = playerTimeToProgramTime(time, matchedSegment.segment);
1908
1909 if (programTime) {
1910 programTimeObject.programDateTime = programTime.toISOString();
1911 }
1912
1913 return callback(null, programTimeObject);
1914};
1915
1916/**
1917 * Seeks in the player to a time that matches the given programTime ISO-8601 string.
1918 *
1919 * @param {Object} args
1920 * @param {String} args.programTime A programTime to seek to as an ISO-8601 String
1921 * @param {Object} args.playlist A playlist to look within
1922 * @param {Number} args.retryCount The number of times to try for an accurate seek. Default is 2.
1923 * @param {Function} args.seekTo A method to perform a seek
1924 * @param {Boolean} args.pauseAfterSeek Whether to end in a paused state after seeking. Default is true.
1925 * @param {Object} args.tech The tech to seek on
1926 * @param {Function} args.callback(err, newTime) A callback to return the new time to
1927 * @returns {String} err.message A detailed error message
1928 * @returns {Number} newTime The exact time that was seeked to in seconds
1929 */
1930var seekToProgramTime = function seekToProgramTime(_ref2) {
1931 var programTime = _ref2.programTime,
1932 playlist = _ref2.playlist,
1933 _ref2$retryCount = _ref2.retryCount,
1934 retryCount = _ref2$retryCount === undefined ? 2 : _ref2$retryCount,
1935 seekTo = _ref2.seekTo,
1936 _ref2$pauseAfterSeek = _ref2.pauseAfterSeek,
1937 pauseAfterSeek = _ref2$pauseAfterSeek === undefined ? true : _ref2$pauseAfterSeek,
1938 tech = _ref2.tech,
1939 callback = _ref2.callback;
1940
1941
1942 if (!callback) {
1943 throw new Error('seekToProgramTime: callback must be provided');
1944 }
1945
1946 if (typeof programTime === 'undefined' || !playlist || !seekTo) {
1947 return callback({
1948 message: 'seekToProgramTime: programTime, seekTo and playlist must be provided'
1949 });
1950 }
1951
1952 if (!playlist.endList && !tech.hasStarted_) {
1953 return callback({
1954 message: 'player must be playing a live stream to start buffering'
1955 });
1956 }
1957
1958 if (!verifyProgramDateTimeTags(playlist)) {
1959 return callback({
1960 message: 'programDateTime tags must be provided in the manifest ' + playlist.resolvedUri
1961 });
1962 }
1963
1964 var matchedSegment = findSegmentForProgramTime(programTime, playlist);
1965
1966 // no match
1967 if (!matchedSegment) {
1968 return callback({
1969 message: programTime + ' was not found in the stream'
1970 });
1971 }
1972
1973 var segment = matchedSegment.segment;
1974 var mediaOffset = getOffsetFromTimestamp(segment.dateTimeObject, programTime);
1975
1976 if (matchedSegment.type === 'estimate') {
1977 // we've run out of retries
1978 if (retryCount === 0) {
1979 return callback({
1980 message: programTime + ' is not buffered yet. Try again'
1981 });
1982 }
1983
1984 seekTo(matchedSegment.estimatedStart + mediaOffset);
1985
1986 tech.one('seeked', function () {
1987 seekToProgramTime({
1988 programTime: programTime,
1989 playlist: playlist,
1990 retryCount: retryCount - 1,
1991 seekTo: seekTo,
1992 pauseAfterSeek: pauseAfterSeek,
1993 tech: tech,
1994 callback: callback
1995 });
1996 });
1997
1998 return;
1999 }
2000
2001 // Since the segment.start value is determined from the buffered end or ending time
2002 // of the prior segment, the seekToTime doesn't need to account for any transmuxer
2003 // modifications.
2004 var seekToTime = segment.start + mediaOffset;
2005 var seekedCallback = function seekedCallback() {
2006 return callback(null, tech.currentTime());
2007 };
2008
2009 // listen for seeked event
2010 tech.one('seeked', seekedCallback);
2011 // pause before seeking as video.js will restore this state
2012 if (pauseAfterSeek) {
2013 tech.pause();
2014 }
2015 seekTo(seekToTime);
2016};
2017
2018/**
2019 * ranges
2020 *
2021 * Utilities for working with TimeRanges.
2022 *
2023 */
2024
// Fudge factor to account for TimeRanges rounding
// (1/30 — presumably chosen as roughly one frame at 30fps; TODO confirm)
var TIME_FUDGE_FACTOR = 1 / 30;
// Comparisons between time values such as current time and the end of the buffered range
// can be misleading because of precision differences or when the current media has poorly
// aligned audio and video, which can cause values to be slightly off from what you would
// expect. This value is what we consider to be safe to use in such comparisons to account
// for these scenarios.
var SAFE_TIME_DELTA = TIME_FUDGE_FACTOR * 3;
/**
 * Builds a TimeRanges object containing only the ranges of `timeRanges` for
 * which `predicate(start, end)` is truthy.
 *
 * @param {TimeRanges} timeRanges - the ranges to filter
 * @param {Function} predicate - receives (start, end) of each range
 * @returns {TimeRanges} a new TimeRanges object of matching ranges
 */
var filterRanges = function filterRanges(timeRanges, predicate) {
  var matching = [];

  if (timeRanges && timeRanges.length) {
    // collect every range the predicate accepts
    for (var idx = 0; idx < timeRanges.length; idx++) {
      var rangeStart = timeRanges.start(idx);
      var rangeEnd = timeRanges.end(idx);

      if (predicate(rangeStart, rangeEnd)) {
        matching.push([rangeStart, rangeEnd]);
      }
    }
  }

  return videojs.createTimeRanges(matching);
};
2048
/**
 * Attempts to find the buffered TimeRange that contains the specified
 * time.
 *
 * @param {TimeRanges} buffered - the TimeRanges object to query
 * @param {number} time - the time to filter on.
 * @returns {TimeRanges} a new TimeRanges object
 */
var findRange = function findRange(buffered, time) {
  // a range "contains" the time if it is within SAFE_TIME_DELTA of it
  return filterRanges(buffered, function (start, end) {
    return time >= start - SAFE_TIME_DELTA && time <= end + SAFE_TIME_DELTA;
  });
};
2061
/**
 * Returns the TimeRanges that begin later than the specified time.
 *
 * @param {TimeRanges} timeRanges - the TimeRanges object to query
 * @param {number} time - the time to filter on.
 * @returns {TimeRanges} a new TimeRanges object.
 */
var findNextRange = function findNextRange(timeRanges, time) {
  // keep only ranges whose start (minus the fudge factor) is at or after time
  return filterRanges(timeRanges, function (start) {
    return time <= start - TIME_FUDGE_FACTOR;
  });
};
2073
/**
 * Returns gaps within a list of TimeRanges
 *
 * @param {TimeRanges} buffered - the TimeRanges object
 * @return {TimeRanges} a TimeRanges object of gaps
 */
var findGaps = function findGaps(buffered) {
  // fewer than two ranges means there is nothing to have a gap between
  if (buffered.length < 2) {
    return videojs.createTimeRanges();
  }

  var gaps = [];

  // each gap spans from the end of one range to the start of the next
  for (var idx = 1; idx < buffered.length; idx++) {
    gaps.push([buffered.end(idx - 1), buffered.start(idx)]);
  }

  return videojs.createTimeRanges(gaps);
};
2095
/**
 * Gets a human readable string for a TimeRange
 *
 * @param {TimeRange} range
 * @returns {String} a human readable string
 */
var printableRange = function printableRange(range) {
  // nothing buffered (or no range at all) renders as the empty string
  if (!range || !range.length) {
    return '';
  }

  var pieces = [];

  for (var idx = 0; idx < range.length; idx++) {
    pieces.push(range.start(idx) + ' => ' + range.end(idx));
  }

  return pieces.join(', ');
};
2115
/**
 * Calculates the amount of time left in seconds until the player hits the end of the
 * buffer and causes a rebuffer
 *
 * @param {TimeRange} buffered
 *        The state of the buffer
 * @param {number} currentTime
 *        The current time of the player
 * @param {number} playbackRate
 *        The current playback rate of the player. Defaults to 1.
 * @return {number}
 *         Time until the player has to start rebuffering in seconds.
 * @function timeUntilRebuffer
 */
var timeUntilRebuffer = function timeUntilRebuffer(buffered, currentTime) {
  var playbackRate = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 1;
  var bufferedEnd = 0;

  // with nothing buffered, treat the buffer as ending at time 0
  if (buffered.length) {
    bufferedEnd = buffered.end(buffered.length - 1);
  }

  // wall-clock time remaining scales inversely with the playback rate
  return (bufferedEnd - currentTime) / playbackRate;
};
2137
/**
 * Converts a TimeRanges object into an array representation
 *
 * @param {TimeRanges} timeRanges
 * @returns {Array} an array of {start, end} objects, one per range
 */
var timeRangesToArray = function timeRangesToArray(timeRanges) {
  var result = [];

  for (var idx = 0; idx < timeRanges.length; idx++) {
    result.push({
      start: timeRanges.start(idx),
      end: timeRanges.end(idx)
    });
  }

  return result;
};
2155
2156/**
2157 * @file create-text-tracks-if-necessary.js
2158 */
2159
/**
 * Create text tracks on video.js if they exist on a segment.
 *
 * @param {Object} sourceBuffer the VSB or FSB
 * @param {Object} mediaSource the HTML media source
 * @param {Object} segment the segment that may contain the text track
 * @private
 */
var createTextTracksIfNecessary = function createTextTracksIfNecessary(sourceBuffer, mediaSource, segment) {
  var player = mediaSource.player_;

  // create an in-band caption track if one is present in the segment
  if (segment.captions && segment.captions.length) {
    sourceBuffer.inbandTextTracks_ = sourceBuffer.inbandTextTracks_ || {};

    for (var trackId in segment.captionStreams) {
      if (sourceBuffer.inbandTextTracks_[trackId]) {
        continue;
      }

      // record that 608 captions were encountered
      player.tech_.trigger({ type: 'usage', name: 'hls-608' });

      var existingTrack = player.textTracks().getTrackById(trackId);

      if (existingTrack) {
        // Reuse an existing track with a CC# id because this was
        // very likely created by videojs-contrib-hls from information
        // in the m3u8 for us to use
        sourceBuffer.inbandTextTracks_[trackId] = existingTrack;
      } else {
        // Otherwise, create a track with the default `CC#` label and
        // without a language
        sourceBuffer.inbandTextTracks_[trackId] = player.addRemoteTextTrack({
          kind: 'captions',
          id: trackId,
          label: trackId
        }, false).track;
      }
    }
  }

  // lazily create a single metadata track for timed (ID3) metadata
  if (segment.metadata && segment.metadata.length && !sourceBuffer.metadataTrack_) {
    sourceBuffer.metadataTrack_ = player.addRemoteTextTrack({
      kind: 'metadata',
      label: 'Timed Metadata'
    }, false).track;
    sourceBuffer.metadataTrack_.inBandMetadataTrackDispatchType = segment.metadata.dispatchType;
  }
};
2208
2209/**
2210 * @file remove-cues-from-track.js
2211 */
2212
/**
 * Remove cues from a track on video.js.
 *
 * @param {Double} start start of where we should remove the cue
 * @param {Double} end end of where the we should remove the cue
 * @param {Object} track the text track to remove the cues from
 * @private
 */
var removeCuesFromTrack = function removeCuesFromTrack(start, end, track) {
  if (!track || !track.cues) {
    return;
  }

  // walk backwards so removals don't disturb the indices yet to be visited
  var idx = track.cues.length;

  while (idx--) {
    var cue = track.cues[idx];

    // Remove any overlapping cue
    if (cue.startTime <= end && cue.endTime >= start) {
      track.removeCue(cue);
    }
  }
};
2244
2245/**
2246 * @file add-text-track-data.js
2247 */
/**
 * Define properties on a cue for backwards compatability,
 * but warn the user that the way that they are using it
 * is depricated and will be removed at a later date.
 *
 * @param {Cue} cue the cue to add the properties on
 * @private
 */
var deprecateOldCue = function deprecateOldCue(cue) {
  // build a getter descriptor that logs a deprecation warning on access
  var deprecatedGetter = function deprecatedGetter(warning, getValue) {
    return {
      get: function get() {
        videojs.log.warn(warning);
        return getValue();
      }
    };
  };

  Object.defineProperties(cue.frame, {
    id: deprecatedGetter('cue.frame.id is deprecated. Use cue.value.key instead.', function () {
      return cue.value.key;
    }),
    value: deprecatedGetter('cue.frame.value is deprecated. Use cue.value.data instead.', function () {
      return cue.value.data;
    }),
    privateData: deprecatedGetter('cue.frame.privateData is deprecated. Use cue.value.data instead.', function () {
      return cue.value.data;
    })
  });
};
2278
/**
 * Clamps a duration to a usable finite number: NaN and +/-Infinity are
 * mapped to Number.MAX_VALUE, anything else passes through unchanged.
 *
 * @param {number} duration the reported media duration
 * @return {number} a finite duration value
 */
var durationOfVideo = function durationOfVideo(duration) {
  if (isNaN(duration) || Math.abs(duration) === Infinity) {
    return Number.MAX_VALUE;
  }

  return duration;
};
/**
 * Add text track data to a source handler given the captions and
 * metadata from the buffer.
 *
 * @param {Object} sourceHandler the virtual source buffer
 * @param {Array} captionArray an array of caption data
 * @param {Array} metadataArray an array of meta data
 * @private
 */
var addTextTrackData = function addTextTrackData(sourceHandler, captionArray, metadataArray) {
  // WebKitDataCue is preferred where available; VTTCue is the standard fallback
  var Cue = window$1.WebKitDataCue || window$1.VTTCue;

  if (captionArray) {
    captionArray.forEach(function (caption) {
      var track = caption.stream;

      // caption times are shifted by the source handler's timestampOffset to
      // line up with player time; `this` is bound to sourceHandler below
      this.inbandTextTracks_[track].addCue(new Cue(caption.startTime + this.timestampOffset, caption.endTime + this.timestampOffset, caption.text));
    }, sourceHandler);
  }

  if (metadataArray) {
    var videoDuration = durationOfVideo(sourceHandler.mediaSource_.duration);

    metadataArray.forEach(function (metadata) {
      var time = metadata.cueTime + this.timestampOffset;

      // if time isn't a finite number between 0 and Infinity, like NaN,
      // ignore this bit of metadata.
      // This likely occurs when you have a non-timed ID3 tag like TIT2,
      // which is the "Title/Songname/Content description" frame
      if (typeof time !== 'number' || window$1.isNaN(time) || time < 0 || !(time < Infinity)) {
        return;
      }

      metadata.frames.forEach(function (frame) {
        var cue = new Cue(time, time, frame.value || frame.url || frame.data || '');

        cue.frame = frame;
        cue.value = frame;
        // expose the legacy cue.frame.* accessors with deprecation warnings
        deprecateOldCue(cue);

        this.metadataTrack_.addCue(cue);
      }, this);
    }, sourceHandler);

    // Updating the metadata cues so that
    // the endTime of each cue is the startTime of the next cue
    // the endTime of last cue is the duration of the video
    if (sourceHandler.metadataTrack_ && sourceHandler.metadataTrack_.cues && sourceHandler.metadataTrack_.cues.length) {
      var cues = sourceHandler.metadataTrack_.cues;
      var cuesArray = [];

      // Create a copy of the TextTrackCueList...
      // ...disregarding cues with a falsey value
      for (var i = 0; i < cues.length; i++) {
        if (cues[i]) {
          cuesArray.push(cues[i]);
        }
      }

      // Group cues by their startTime value
      var cuesGroupedByStartTime = cuesArray.reduce(function (obj, cue) {
        var timeSlot = obj[cue.startTime] || [];

        timeSlot.push(cue);
        obj[cue.startTime] = timeSlot;

        return obj;
      }, {});

      // Sort startTimes by ascending order
      var sortedStartTimes = Object.keys(cuesGroupedByStartTime).sort(function (a, b) {
        return Number(a) - Number(b);
      });

      // Map each cue group's endTime to the next group's startTime
      sortedStartTimes.forEach(function (startTime, idx) {
        var cueGroup = cuesGroupedByStartTime[startTime];
        // the final group extends to the end of the video
        var nextTime = Number(sortedStartTimes[idx + 1]) || videoDuration;

        // Map each cue's endTime the next group's startTime
        cueGroup.forEach(function (cue) {
          cue.endTime = nextTime;
        });
      });
    }
  }
};
2377
// Environment detection for the inlined web-worker shim below: locate the
// Worker, Blob/BlobBuilder and object-URL implementations available here.
var win = typeof window !== 'undefined' ? window : {},
    TARGET = typeof Symbol === 'undefined' ? '__target' : Symbol(),
    SCRIPT_TYPE = 'application/javascript',
    BlobBuilder = win.BlobBuilder || win.WebKitBlobBuilder || win.MozBlobBuilder || win.MSBlobBuilder,
    // NOTE(review): `URL` is a hoisted var and still undefined when this line
    // evaluates, so `URL && URL.msURL` always yields undefined — the chain is
    // effectively win.URL || win.webkitURL
    URL = win.URL || win.webkitURL || URL && URL.msURL,
    Worker = win.Worker;
2384
/**
 * Returns a wrapper around Web Worker code that is constructible.
 *
 * @function shimWorker
 *
 * @param { String } filename The name of the file
 * @param { Function } fn Function wrapping the code of the worker
 */
function shimWorker(filename, fn) {
  return function ShimWorker(forceFallback) {
    var shim = this;

    // no inlined code was provided, so load the worker from its file
    if (!fn) {
      return new Worker(filename);
    }

    if (Worker && !forceFallback) {
      // Convert the function's inner code to a string to construct the worker
      var source = fn.toString().replace(/^function.+?{/, '').slice(0, -1);
      var objURL = createSourceObject(source);

      this[TARGET] = new Worker(objURL);
      wrapTerminate(this[TARGET], objURL);
      return this[TARGET];
    }

    // Workers are unavailable (or a fallback was forced): run the worker code
    // on this thread behind a postMessage/onmessage shim
    var selfShim = {
      postMessage: function postMessage(m) {
        if (shim.onmessage) {
          setTimeout(function () {
            shim.onmessage({ data: m, target: selfShim });
          });
        }
      }
    };

    fn.call(selfShim);

    this.postMessage = function (m) {
      setTimeout(function () {
        selfShim.onmessage({ data: m, target: shim });
      });
    };
    this.isThisThread = true;
  };
}
// Test Worker capabilities: spin up a throwaway worker and post a transferable
// to it. If either step throws, workers are unusable here and the in-thread
// shim is used instead.
if (Worker) {
  var testWorker,
      objURL = createSourceObject('self.onmessage = function () {}'),
      testArray = new Uint8Array(1);

  try {
    testWorker = new Worker(objURL);

    // Native browser on some Samsung devices throws for transferables, let's detect it
    testWorker.postMessage(testArray, [testArray.buffer]);
  } catch (e) {
    // clearing Worker makes ShimWorker fall back to running on this thread
    Worker = null;
  } finally {
    // always release the probe's object URL and worker
    URL.revokeObjectURL(objURL);
    if (testWorker) {
      testWorker.terminate();
    }
  }
}
2448
/**
 * Creates an object URL for a script blob containing the given worker source.
 *
 * Prefers the standard Blob constructor and falls back to the legacy
 * BlobBuilder API on older browsers.
 *
 * @param {String} str JavaScript source code for the worker
 * @returns {String} an object URL referencing a script-typed blob
 */
function createSourceObject(str) {
  try {
    return URL.createObjectURL(new Blob([str], { type: SCRIPT_TYPE }));
  } catch (e) {
    var blob = new BlobBuilder();

    blob.append(str);
    // FIX: previously passed the undefined identifier `type`, which threw a
    // ReferenceError on the legacy path; use the script MIME type constant
    return URL.createObjectURL(blob.getBlob(SCRIPT_TYPE));
  }
}
2458
/**
 * Patches a worker's terminate method so the object URL backing the worker
 * is revoked when the worker shuts down.
 *
 * @param {Worker} worker the worker to patch
 * @param {String} objURL the object URL to revoke on terminate
 */
function wrapTerminate(worker, objURL) {
  if (!worker || !objURL) {
    return;
  }

  var originalTerminate = worker.terminate;

  worker.objURL = objURL;
  worker.terminate = function () {
    // release the blob URL before shutting the worker down
    if (worker.objURL) {
      URL.revokeObjectURL(worker.objURL);
    }

    originalTerminate.call(worker);
  };
}
2468
2469var TransmuxWorker = new shimWorker("./transmuxer-worker.worker.js", function (window, document$$1) {
2470 var self = this;
2471 var transmuxerWorker = function () {
2472
2473 /**
2474 * mux.js
2475 *
2476 * Copyright (c) Brightcove
2477 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
2478 *
2479 * A lightweight readable stream implemention that handles event dispatching.
2480 * Objects that inherit from streams should call init in their constructors.
2481 */
2482
2483 var Stream = function Stream() {
2484 this.init = function () {
2485 var listeners = {};
2486 /**
2487 * Add a listener for a specified event type.
2488 * @param type {string} the event name
2489 * @param listener {function} the callback to be invoked when an event of
2490 * the specified type occurs
2491 */
2492 this.on = function (type, listener) {
2493 if (!listeners[type]) {
2494 listeners[type] = [];
2495 }
2496 listeners[type] = listeners[type].concat(listener);
2497 };
2498 /**
2499 * Remove a listener for a specified event type.
2500 * @param type {string} the event name
2501 * @param listener {function} a function previously registered for this
2502 * type of event through `on`
2503 */
2504 this.off = function (type, listener) {
2505 var index;
2506 if (!listeners[type]) {
2507 return false;
2508 }
2509 index = listeners[type].indexOf(listener);
2510 listeners[type] = listeners[type].slice();
2511 listeners[type].splice(index, 1);
2512 return index > -1;
2513 };
2514 /**
2515 * Trigger an event of the specified type on this stream. Any additional
2516 * arguments to this function are passed as parameters to event listeners.
2517 * @param type {string} the event name
2518 */
2519 this.trigger = function (type) {
2520 var callbacks, i, length, args;
2521 callbacks = listeners[type];
2522 if (!callbacks) {
2523 return;
2524 }
2525 // Slicing the arguments on every invocation of this method
2526 // can add a significant amount of overhead. Avoid the
2527 // intermediate object creation for the common case of a
2528 // single callback argument
2529 if (arguments.length === 2) {
2530 length = callbacks.length;
2531 for (i = 0; i < length; ++i) {
2532 callbacks[i].call(this, arguments[1]);
2533 }
2534 } else {
2535 args = [];
2536 i = arguments.length;
2537 for (i = 1; i < arguments.length; ++i) {
2538 args.push(arguments[i]);
2539 }
2540 length = callbacks.length;
2541 for (i = 0; i < length; ++i) {
2542 callbacks[i].apply(this, args);
2543 }
2544 }
2545 };
2546 /**
2547 * Destroys the stream and cleans up.
2548 */
2549 this.dispose = function () {
2550 listeners = {};
2551 };
2552 };
2553 };
2554
2555 /**
2556 * Forwards all `data` events on this stream to the destination stream. The
2557 * destination stream should provide a method `push` to receive the data
2558 * events as they arrive.
2559 * @param destination {stream} the stream that will receive all `data` events
2560 * @param autoFlush {boolean} if false, we will not call `flush` on the destination
2561 * when the current stream emits a 'done' event
2562 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
2563 */
2564 Stream.prototype.pipe = function (destination) {
2565 this.on('data', function (data) {
2566 destination.push(data);
2567 });
2568
2569 this.on('done', function (flushSource) {
2570 destination.flush(flushSource);
2571 });
2572
2573 this.on('partialdone', function (flushSource) {
2574 destination.partialFlush(flushSource);
2575 });
2576
2577 this.on('endedtimeline', function (flushSource) {
2578 destination.endTimeline(flushSource);
2579 });
2580
2581 this.on('reset', function (flushSource) {
2582 destination.reset(flushSource);
2583 });
2584
2585 return destination;
2586 };
2587
2588 // Default stream functions that are expected to be overridden to perform
2589 // actual work. These are provided by the prototype as a sort of no-op
2590 // implementation so that we don't have to check for their existence in the
2591 // `pipe` function above.
2592 Stream.prototype.push = function (data) {
2593 this.trigger('data', data);
2594 };
2595
2596 Stream.prototype.flush = function (flushSource) {
2597 this.trigger('done', flushSource);
2598 };
2599
2600 Stream.prototype.partialFlush = function (flushSource) {
2601 this.trigger('partialdone', flushSource);
2602 };
2603
2604 Stream.prototype.endTimeline = function (flushSource) {
2605 this.trigger('endedtimeline', flushSource);
2606 };
2607
2608 Stream.prototype.reset = function (flushSource) {
2609 this.trigger('reset', flushSource);
2610 };
2611
2612 var stream = Stream;
2613
2614 /**
2615 * mux.js
2616 *
2617 * Copyright (c) Brightcove
2618 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
2619 *
2620 * Functions that generate fragmented MP4s suitable for use with Media
2621 * Source Extensions.
2622 */
2623
2624 var UINT32_MAX = Math.pow(2, 32) - 1;
2625
2626 var box, dinf, esds, ftyp, mdat, mfhd, minf, moof, moov, mvex, mvhd, trak, tkhd, mdia, mdhd, hdlr, sdtp, stbl, stsd, traf, trex, trun, types, MAJOR_BRAND, MINOR_VERSION, AVC1_BRAND, VIDEO_HDLR, AUDIO_HDLR, HDLR_TYPES, VMHD, SMHD, DREF, STCO, STSC, STSZ, STTS;
2627
2628 // pre-calculate constants
2629 (function () {
2630 var i;
2631 types = {
2632 avc1: [], // codingname
2633 avcC: [],
2634 btrt: [],
2635 dinf: [],
2636 dref: [],
2637 esds: [],
2638 ftyp: [],
2639 hdlr: [],
2640 mdat: [],
2641 mdhd: [],
2642 mdia: [],
2643 mfhd: [],
2644 minf: [],
2645 moof: [],
2646 moov: [],
2647 mp4a: [], // codingname
2648 mvex: [],
2649 mvhd: [],
2650 pasp: [],
2651 sdtp: [],
2652 smhd: [],
2653 stbl: [],
2654 stco: [],
2655 stsc: [],
2656 stsd: [],
2657 stsz: [],
2658 stts: [],
2659 styp: [],
2660 tfdt: [],
2661 tfhd: [],
2662 traf: [],
2663 trak: [],
2664 trun: [],
2665 trex: [],
2666 tkhd: [],
2667 vmhd: []
2668 };
2669
2670 // In environments where Uint8Array is undefined (e.g., IE8), skip set up so that we
2671 // don't throw an error
2672 if (typeof Uint8Array === 'undefined') {
2673 return;
2674 }
2675
2676 for (i in types) {
2677 if (types.hasOwnProperty(i)) {
2678 types[i] = [i.charCodeAt(0), i.charCodeAt(1), i.charCodeAt(2), i.charCodeAt(3)];
2679 }
2680 }
2681
2682 MAJOR_BRAND = new Uint8Array(['i'.charCodeAt(0), 's'.charCodeAt(0), 'o'.charCodeAt(0), 'm'.charCodeAt(0)]);
2683 AVC1_BRAND = new Uint8Array(['a'.charCodeAt(0), 'v'.charCodeAt(0), 'c'.charCodeAt(0), '1'.charCodeAt(0)]);
2684 MINOR_VERSION = new Uint8Array([0, 0, 0, 1]);
2685 VIDEO_HDLR = new Uint8Array([0x00, // version 0
2686 0x00, 0x00, 0x00, // flags
2687 0x00, 0x00, 0x00, 0x00, // pre_defined
2688 0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'
2689 0x00, 0x00, 0x00, 0x00, // reserved
2690 0x00, 0x00, 0x00, 0x00, // reserved
2691 0x00, 0x00, 0x00, 0x00, // reserved
2692 0x56, 0x69, 0x64, 0x65, 0x6f, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
2693 ]);
2694 AUDIO_HDLR = new Uint8Array([0x00, // version 0
2695 0x00, 0x00, 0x00, // flags
2696 0x00, 0x00, 0x00, 0x00, // pre_defined
2697 0x73, 0x6f, 0x75, 0x6e, // handler_type: 'soun'
2698 0x00, 0x00, 0x00, 0x00, // reserved
2699 0x00, 0x00, 0x00, 0x00, // reserved
2700 0x00, 0x00, 0x00, 0x00, // reserved
2701 0x53, 0x6f, 0x75, 0x6e, 0x64, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler'
2702 ]);
2703 HDLR_TYPES = {
2704 video: VIDEO_HDLR,
2705 audio: AUDIO_HDLR
2706 };
2707 DREF = new Uint8Array([0x00, // version 0
2708 0x00, 0x00, 0x00, // flags
2709 0x00, 0x00, 0x00, 0x01, // entry_count
2710 0x00, 0x00, 0x00, 0x0c, // entry_size
2711 0x75, 0x72, 0x6c, 0x20, // 'url' type
2712 0x00, // version 0
2713 0x00, 0x00, 0x01 // entry_flags
2714 ]);
2715 SMHD = new Uint8Array([0x00, // version
2716 0x00, 0x00, 0x00, // flags
2717 0x00, 0x00, // balance, 0 means centered
2718 0x00, 0x00 // reserved
2719 ]);
2720 STCO = new Uint8Array([0x00, // version
2721 0x00, 0x00, 0x00, // flags
2722 0x00, 0x00, 0x00, 0x00 // entry_count
2723 ]);
2724 STSC = STCO;
2725 STSZ = new Uint8Array([0x00, // version
2726 0x00, 0x00, 0x00, // flags
2727 0x00, 0x00, 0x00, 0x00, // sample_size
2728 0x00, 0x00, 0x00, 0x00 // sample_count
2729 ]);
2730 STTS = STCO;
2731 VMHD = new Uint8Array([0x00, // version
2732 0x00, 0x00, 0x01, // flags
2733 0x00, 0x00, // graphicsmode
2734 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 // opcolor
2735 ]);
2736 })();
2737
2738 box = function box(type) {
2739 var payload = [],
2740 size = 0,
2741 i,
2742 result,
2743 view;
2744
2745 for (i = 1; i < arguments.length; i++) {
2746 payload.push(arguments[i]);
2747 }
2748
2749 i = payload.length;
2750
2751 // calculate the total size we need to allocate
2752 while (i--) {
2753 size += payload[i].byteLength;
2754 }
2755 result = new Uint8Array(size + 8);
2756 view = new DataView(result.buffer, result.byteOffset, result.byteLength);
2757 view.setUint32(0, result.byteLength);
2758 result.set(type, 4);
2759
2760 // copy the payload into the result
2761 for (i = 0, size = 8; i < payload.length; i++) {
2762 result.set(payload[i], size);
2763 size += payload[i].byteLength;
2764 }
2765 return result;
2766 };
2767
2768 dinf = function dinf() {
2769 return box(types.dinf, box(types.dref, DREF));
2770 };
2771
2772 esds = function esds(track) {
2773 return box(types.esds, new Uint8Array([0x00, // version
2774 0x00, 0x00, 0x00, // flags
2775
2776 // ES_Descriptor
2777 0x03, // tag, ES_DescrTag
2778 0x19, // length
2779 0x00, 0x00, // ES_ID
2780 0x00, // streamDependenceFlag, URL_flag, reserved, streamPriority
2781
2782 // DecoderConfigDescriptor
2783 0x04, // tag, DecoderConfigDescrTag
2784 0x11, // length
2785 0x40, // object type
2786 0x15, // streamType
2787 0x00, 0x06, 0x00, // bufferSizeDB
2788 0x00, 0x00, 0xda, 0xc0, // maxBitrate
2789 0x00, 0x00, 0xda, 0xc0, // avgBitrate
2790
2791 // DecoderSpecificInfo
2792 0x05, // tag, DecoderSpecificInfoTag
2793 0x02, // length
2794 // ISO/IEC 14496-3, AudioSpecificConfig
2795 // for samplingFrequencyIndex see ISO/IEC 13818-7:2006, 8.1.3.2.2, Table 35
2796 track.audioobjecttype << 3 | track.samplingfrequencyindex >>> 1, track.samplingfrequencyindex << 7 | track.channelcount << 3, 0x06, 0x01, 0x02 // GASpecificConfig
2797 ]));
2798 };
2799
2800 ftyp = function ftyp() {
2801 return box(types.ftyp, MAJOR_BRAND, MINOR_VERSION, MAJOR_BRAND, AVC1_BRAND);
2802 };
2803
2804 hdlr = function hdlr(type) {
2805 return box(types.hdlr, HDLR_TYPES[type]);
2806 };
2807 mdat = function mdat(data) {
2808 return box(types.mdat, data);
2809 };
2810 mdhd = function mdhd(track) {
2811 var result = new Uint8Array([0x00, // version 0
2812 0x00, 0x00, 0x00, // flags
2813 0x00, 0x00, 0x00, 0x02, // creation_time
2814 0x00, 0x00, 0x00, 0x03, // modification_time
2815 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
2816
2817 track.duration >>> 24 & 0xFF, track.duration >>> 16 & 0xFF, track.duration >>> 8 & 0xFF, track.duration & 0xFF, // duration
2818 0x55, 0xc4, // 'und' language (undetermined)
2819 0x00, 0x00]);
2820
2821 // Use the sample rate from the track metadata, when it is
2822 // defined. The sample rate can be parsed out of an ADTS header, for
2823 // instance.
2824 if (track.samplerate) {
2825 result[12] = track.samplerate >>> 24 & 0xFF;
2826 result[13] = track.samplerate >>> 16 & 0xFF;
2827 result[14] = track.samplerate >>> 8 & 0xFF;
2828 result[15] = track.samplerate & 0xFF;
2829 }
2830
2831 return box(types.mdhd, result);
2832 };
2833 mdia = function mdia(track) {
2834 return box(types.mdia, mdhd(track), hdlr(track.type), minf(track));
2835 };
2836 mfhd = function mfhd(sequenceNumber) {
2837 return box(types.mfhd, new Uint8Array([0x00, 0x00, 0x00, 0x00, // flags
2838 (sequenceNumber & 0xFF000000) >> 24, (sequenceNumber & 0xFF0000) >> 16, (sequenceNumber & 0xFF00) >> 8, sequenceNumber & 0xFF // sequence_number
2839 ]));
2840 };
2841 minf = function minf(track) {
2842 return box(types.minf, track.type === 'video' ? box(types.vmhd, VMHD) : box(types.smhd, SMHD), dinf(), stbl(track));
2843 };
2844 moof = function moof(sequenceNumber, tracks) {
2845 var trackFragments = [],
2846 i = tracks.length;
2847 // build traf boxes for each track fragment
2848 while (i--) {
2849 trackFragments[i] = traf(tracks[i]);
2850 }
2851 return box.apply(null, [types.moof, mfhd(sequenceNumber)].concat(trackFragments));
2852 };
2853 /**
2854 * Returns a movie box.
2855 * @param tracks {array} the tracks associated with this movie
2856 * @see ISO/IEC 14496-12:2012(E), section 8.2.1
2857 */
2858 moov = function moov(tracks) {
2859 var i = tracks.length,
2860 boxes = [];
2861
2862 while (i--) {
2863 boxes[i] = trak(tracks[i]);
2864 }
2865
2866 return box.apply(null, [types.moov, mvhd(0xffffffff)].concat(boxes).concat(mvex(tracks)));
2867 };
2868 mvex = function mvex(tracks) {
2869 var i = tracks.length,
2870 boxes = [];
2871
2872 while (i--) {
2873 boxes[i] = trex(tracks[i]);
2874 }
2875 return box.apply(null, [types.mvex].concat(boxes));
2876 };
2877 mvhd = function mvhd(duration) {
2878 var bytes = new Uint8Array([0x00, // version 0
2879 0x00, 0x00, 0x00, // flags
2880 0x00, 0x00, 0x00, 0x01, // creation_time
2881 0x00, 0x00, 0x00, 0x02, // modification_time
2882 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
2883 (duration & 0xFF000000) >> 24, (duration & 0xFF0000) >> 16, (duration & 0xFF00) >> 8, duration & 0xFF, // duration
2884 0x00, 0x01, 0x00, 0x00, // 1.0 rate
2885 0x01, 0x00, // 1.0 volume
2886 0x00, 0x00, // reserved
2887 0x00, 0x00, 0x00, 0x00, // reserved
2888 0x00, 0x00, 0x00, 0x00, // reserved
2889 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
2890 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
2891 0xff, 0xff, 0xff, 0xff // next_track_ID
2892 ]);
2893 return box(types.mvhd, bytes);
2894 };
2895
2896 sdtp = function sdtp(track) {
2897 var samples = track.samples || [],
2898 bytes = new Uint8Array(4 + samples.length),
2899 flags,
2900 i;
2901
2902 // leave the full box header (4 bytes) all zero
2903
2904 // write the sample table
2905 for (i = 0; i < samples.length; i++) {
2906 flags = samples[i].flags;
2907
2908 bytes[i + 4] = flags.dependsOn << 4 | flags.isDependedOn << 2 | flags.hasRedundancy;
2909 }
2910
2911 return box(types.sdtp, bytes);
2912 };
2913
2914 stbl = function stbl(track) {
2915 return box(types.stbl, stsd(track), box(types.stts, STTS), box(types.stsc, STSC), box(types.stsz, STSZ), box(types.stco, STCO));
2916 };
2917
2918 (function () {
2919 var videoSample, audioSample;
2920
2921 stsd = function stsd(track) {
2922
2923 return box(types.stsd, new Uint8Array([0x00, // version 0
2924 0x00, 0x00, 0x00, // flags
2925 0x00, 0x00, 0x00, 0x01]), track.type === 'video' ? videoSample(track) : audioSample(track));
2926 };
2927
/**
 * Generate a VisualSampleEntry (avc1) box for a video track, per
 * ISO/IEC 14496-12 and ISO/IEC 14496-15. The avcC child box carries the
 * H.264 decoder configuration record built from the track's SPS/PPS NAL
 * units, a btrt box advertises hard-coded bitrate hints, and a pasp box is
 * appended when the track carries a sample aspect ratio.
 *
 * @param {Object} track - reads sps, pps, width, height, profileIdc,
 *   profileCompatibility, levelIdc and (optionally) sarRatio
 * @return {Uint8Array} the serialized avc1 sample entry
 */
videoSample = function videoSample(track) {
  var sps = track.sps || [],
      pps = track.pps || [],
      sequenceParameterSets = [],
      pictureParameterSets = [],
      i,
      avc1Box;

  // assemble the SPSs: each entry is a 16-bit length prefix followed by the raw NAL bytes
  for (i = 0; i < sps.length; i++) {
    sequenceParameterSets.push((sps[i].byteLength & 0xFF00) >>> 8);
    sequenceParameterSets.push(sps[i].byteLength & 0xFF); // sequenceParameterSetLength
    sequenceParameterSets = sequenceParameterSets.concat(Array.prototype.slice.call(sps[i])); // SPS
  }

  // assemble the PPSs in the same length-prefixed layout
  for (i = 0; i < pps.length; i++) {
    pictureParameterSets.push((pps[i].byteLength & 0xFF00) >>> 8);
    pictureParameterSets.push(pps[i].byteLength & 0xFF);
    pictureParameterSets = pictureParameterSets.concat(Array.prototype.slice.call(pps[i]));
  }

  avc1Box = [types.avc1, new Uint8Array([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
  0x00, 0x01, // data_reference_index
  0x00, 0x00, // pre_defined
  0x00, 0x00, // reserved
  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
  (track.width & 0xff00) >> 8, track.width & 0xff, // width
  (track.height & 0xff00) >> 8, track.height & 0xff, // height
  0x00, 0x48, 0x00, 0x00, // horizresolution: 72 dpi, 16.16 fixed point
  0x00, 0x48, 0x00, 0x00, // vertresolution: 72 dpi, 16.16 fixed point
  0x00, 0x00, 0x00, 0x00, // reserved
  0x00, 0x01, // frame_count
  0x13, 0x76, 0x69, 0x64, 0x65, 0x6f, 0x6a, 0x73, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x69, 0x62, 0x2d, 0x68, 0x6c, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // compressorname: 0x13 (19) byte length prefix, "videojs-contrib-hls", zero-padded to 32 bytes
  0x00, 0x18, // depth = 24
  0x11, 0x11 // pre_defined = -1 (NOTE(review): -1 would serialize as 0xff, 0xff; 0x11, 0x11 kept byte-for-byte as emitted upstream -- confirm before changing)
  ]), box(types.avcC, new Uint8Array([0x01, // configurationVersion
  track.profileIdc, // AVCProfileIndication
  track.profileCompatibility, // profile_compatibility
  track.levelIdc, // AVCLevelIndication
  0xff // lengthSizeMinusOne, hard-coded to 4 bytes
  ].concat([sps.length], // numOfSequenceParameterSets
  sequenceParameterSets, // "SPS"
  [pps.length], // numOfPictureParameterSets
  pictureParameterSets // "PPS"
  ))), box(types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80, // bufferSizeDB
  0x00, 0x2d, 0xc6, 0xc0, // maxBitrate
  0x00, 0x2d, 0xc6, 0xc0 // avgBitrate
  ]))];

  if (track.sarRatio) {
    var hSpacing = track.sarRatio[0],
        vSpacing = track.sarRatio[1];

    // pixel aspect ratio box: 32-bit big-endian hSpacing then vSpacing
    avc1Box.push(box(types.pasp, new Uint8Array([(hSpacing & 0xFF000000) >> 24, (hSpacing & 0xFF0000) >> 16, (hSpacing & 0xFF00) >> 8, hSpacing & 0xFF, (vSpacing & 0xFF000000) >> 24, (vSpacing & 0xFF0000) >> 16, (vSpacing & 0xFF00) >> 8, vSpacing & 0xFF])));
  }

  return box.apply(null, avc1Box);
};
2987
/**
 * Generate an AudioSampleEntry (mp4a) box for an audio track, per
 * ISO/IEC 14496-12 with the MP4 elementary stream descriptor (esds,
 * ISO/IEC 14496-14) appended as a child box.
 *
 * @param {Object} track - reads channelcount, samplesize and samplerate
 * @return {Uint8Array} the serialized mp4a sample entry
 */
audioSample = function audioSample(track) {
  return box(types.mp4a, new Uint8Array([

  // SampleEntry, ISO/IEC 14496-12
  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
  0x00, 0x01, // data_reference_index

  // AudioSampleEntry, ISO/IEC 14496-12
  0x00, 0x00, 0x00, 0x00, // reserved
  0x00, 0x00, 0x00, 0x00, // reserved
  (track.channelcount & 0xff00) >> 8, track.channelcount & 0xff, // channelcount

  (track.samplesize & 0xff00) >> 8, track.samplesize & 0xff, // samplesize
  0x00, 0x00, // pre_defined
  0x00, 0x00, // reserved

  (track.samplerate & 0xff00) >> 8, track.samplerate & 0xff, 0x00, 0x00 // samplerate, 16.16 fixed point

  // MP4AudioSampleEntry, ISO/IEC 14496-14
  ]), esds(track));
};
3009 })();
3010
/**
 * Generate a track header (tkhd) box. Width and height are written as
 * 16.16 fixed-point values; the transformation matrix is identity.
 *
 * @param {Object} track - reads id, duration, width and height
 * @return {Uint8Array} the serialized tkhd box
 */
tkhd = function tkhd(track) {
  var result = new Uint8Array([0x00, // version 0
  0x00, 0x00, 0x07, // flags: track_enabled | track_in_movie | track_in_preview
  0x00, 0x00, 0x00, 0x00, // creation_time
  0x00, 0x00, 0x00, 0x00, // modification_time
  (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
  0x00, 0x00, 0x00, 0x00, // reserved
  (track.duration & 0xFF000000) >> 24, (track.duration & 0xFF0000) >> 16, (track.duration & 0xFF00) >> 8, track.duration & 0xFF, // duration
  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
  0x00, 0x00, // layer
  0x00, 0x00, // alternate_group
  0x01, 0x00, // non-audio track volume
  0x00, 0x00, // reserved
  0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
  (track.width & 0xFF00) >> 8, track.width & 0xFF, 0x00, 0x00, // width, 16.16 fixed point
  (track.height & 0xFF00) >> 8, track.height & 0xFF, 0x00, 0x00 // height, 16.16 fixed point
  ]);

  return box(types.tkhd, result);
};
3031
/**
 * Generate a track fragment (traf) box. A traf box collects metadata
 * about tracks in a movie fragment (moof) box: a tfhd (fragment header),
 * a tfdt (decode time), a trun (sample run) and, for video only, an sdtp
 * (sample dependency) box.
 *
 * @param {Object} track - reads id, type, baseMediaDecodeTime and samples
 * @return {Uint8Array} the serialized traf box
 */
traf = function traf(track) {
  var trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable, dataOffset, upperWordBaseMediaDecodeTime, lowerWordBaseMediaDecodeTime;

  trackFragmentHeader = box(types.tfhd, new Uint8Array([0x00, // version 0
  0x00, 0x00, 0x3a, // flags: sample-description-index and default sample duration/size/flags present
  (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
  0x00, 0x00, 0x00, 0x01, // sample_description_index
  0x00, 0x00, 0x00, 0x00, // default_sample_duration
  0x00, 0x00, 0x00, 0x00, // default_sample_size
  0x00, 0x00, 0x00, 0x00 // default_sample_flags
  ]));

  // split the (possibly > 32-bit) baseMediaDecodeTime into two 32-bit
  // words for the version-1 tfdt box
  upperWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime / (UINT32_MAX + 1));
  lowerWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime % (UINT32_MAX + 1));

  trackFragmentDecodeTime = box(types.tfdt, new Uint8Array([0x01, // version 1
  0x00, 0x00, 0x00, // flags
  // baseMediaDecodeTime
  upperWordBaseMediaDecodeTime >>> 24 & 0xFF, upperWordBaseMediaDecodeTime >>> 16 & 0xFF, upperWordBaseMediaDecodeTime >>> 8 & 0xFF, upperWordBaseMediaDecodeTime & 0xFF, lowerWordBaseMediaDecodeTime >>> 24 & 0xFF, lowerWordBaseMediaDecodeTime >>> 16 & 0xFF, lowerWordBaseMediaDecodeTime >>> 8 & 0xFF, lowerWordBaseMediaDecodeTime & 0xFF]));

  // the data offset specifies the number of bytes from the start of
  // the containing moof to the first payload byte of the associated
  // mdat
  dataOffset = 32 + // tfhd
  20 + // tfdt
  8 + // traf header
  16 + // mfhd
  8 + // moof header
  8; // mdat header

  // audio tracks require less metadata
  if (track.type === 'audio') {
    trackFragmentRun = trun(track, dataOffset);
    return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun);
  }

  // video tracks should contain an independent and disposable samples
  // box (sdtp)
  // generate one and adjust offsets to match
  sampleDependencyTable = sdtp(track);
  trackFragmentRun = trun(track, sampleDependencyTable.length + dataOffset);
  return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable);
};
3079
/**
 * Generate a track (trak) box wrapping the track header (tkhd) and
 * media (mdia) boxes.
 * @param track {object} a track definition; duration defaults to 0xffffffff when unset
 * @return {Uint8Array} the track box
 */
trak = function trak(track) {
  if (!track.duration) {
    track.duration = 0xffffffff;
  }
  return box(types.trak, tkhd(track), mdia(track));
};
3089
/**
 * Generate a track extends (trex) box carrying the track's default
 * sample settings. The final byte of default_sample_flags holds the
 * sample degradation priority, a hint about the importance of this
 * sample relative to others; it is lowered for all non-video tracks.
 *
 * @param {Object} track - reads id and type
 * @return {Uint8Array} the serialized trex box
 */
trex = function trex(track) {
  var trackId = track.id;
  var payload = [0x00, // version 0
  0x00, 0x00, 0x00, // flags
  (trackId & 0xFF000000) >> 24, (trackId & 0xFF0000) >> 16, (trackId & 0xFF00) >> 8, trackId & 0xFF, // track_ID
  0x00, 0x00, 0x00, 0x01, // default_sample_description_index
  0x00, 0x00, 0x00, 0x00, // default_sample_duration
  0x00, 0x00, 0x00, 0x00, // default_sample_size
  0x00, 0x01, 0x00, // default_sample_flags (upper bytes)
  track.type === 'video' ? 0x01 : 0x00 // degradation priority: kept only for video
  ];

  return box(types.trex, new Uint8Array(payload));
};
3109
(function () {
  var audioTrun, videoTrun, trunHeader;

  /**
   * Build the common 12-byte trun prefix: version/flags, sample_count and
   * data_offset.
   * This method assumes all samples are uniform. That is, if a
   * duration is present for the first sample, it will be present for
   * all subsequent samples.
   * see ISO/IEC 14496-12:2012, Section 8.8.8.1
   */
  trunHeader = function trunHeader(samples, offset) {
    var durationPresent = 0,
        sizePresent = 0,
        flagsPresent = 0,
        compositionTimeOffset = 0;

    // trun flag constants
    if (samples.length) {
      if (samples[0].duration !== undefined) {
        durationPresent = 0x1;
      }
      if (samples[0].size !== undefined) {
        sizePresent = 0x2;
      }
      if (samples[0].flags !== undefined) {
        flagsPresent = 0x4;
      }
      if (samples[0].compositionTimeOffset !== undefined) {
        compositionTimeOffset = 0x8;
      }
    }

    return [0x00, // version 0
    0x00, durationPresent | sizePresent | flagsPresent | compositionTimeOffset, 0x01, // flags (0x01 = data-offset-present)
    (samples.length & 0xFF000000) >>> 24, (samples.length & 0xFF0000) >>> 16, (samples.length & 0xFF00) >>> 8, samples.length & 0xFF, // sample_count
    (offset & 0xFF000000) >>> 24, (offset & 0xFF0000) >>> 16, (offset & 0xFF00) >>> 8, offset & 0xFF // data_offset
    ];
  };

  /**
   * Build a trun box for a video track: 16 bytes per sample covering
   * duration, size, flags and composition time offset.
   */
  videoTrun = function videoTrun(track, offset) {
    var bytesOffset, bytes, header, samples, sample, i;

    samples = track.samples || [];
    // payload begins after the box header (8), the trun prefix (12) and
    // the per-sample records (16 each)
    offset += 8 + 12 + 16 * samples.length;
    header = trunHeader(samples, offset);
    bytes = new Uint8Array(header.length + samples.length * 16);
    bytes.set(header);
    bytesOffset = header.length;

    for (i = 0; i < samples.length; i++) {
      sample = samples[i];

      bytes[bytesOffset++] = (sample.duration & 0xFF000000) >>> 24;
      bytes[bytesOffset++] = (sample.duration & 0xFF0000) >>> 16;
      bytes[bytesOffset++] = (sample.duration & 0xFF00) >>> 8;
      bytes[bytesOffset++] = sample.duration & 0xFF; // sample_duration
      bytes[bytesOffset++] = (sample.size & 0xFF000000) >>> 24;
      bytes[bytesOffset++] = (sample.size & 0xFF0000) >>> 16;
      bytes[bytesOffset++] = (sample.size & 0xFF00) >>> 8;
      bytes[bytesOffset++] = sample.size & 0xFF; // sample_size
      bytes[bytesOffset++] = sample.flags.isLeading << 2 | sample.flags.dependsOn;
      bytes[bytesOffset++] = sample.flags.isDependedOn << 6 | sample.flags.hasRedundancy << 4 | sample.flags.paddingValue << 1 | sample.flags.isNonSyncSample;
      // FIX: was `degradationPriority & 0xF0 << 8` and `& 0x0F`. Because
      // `<<` binds tighter than `&`, the first expression masked with
      // 0xF000 and always wrote 0 here, and the low byte kept only a
      // nibble. Emit the full 16-bit degradation_priority, big-endian.
      bytes[bytesOffset++] = (sample.flags.degradationPriority & 0xFF00) >>> 8;
      bytes[bytesOffset++] = sample.flags.degradationPriority & 0xFF; // sample_flags
      bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF000000) >>> 24;
      bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF0000) >>> 16;
      bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF00) >>> 8;
      bytes[bytesOffset++] = sample.compositionTimeOffset & 0xFF; // sample_composition_time_offset
    }
    return box(types.trun, bytes);
  };

  /**
   * Build a trun box for an audio track: 8 bytes per sample covering
   * duration and size only.
   */
  audioTrun = function audioTrun(track, offset) {
    var bytes, bytesOffset, header, samples, sample, i;

    samples = track.samples || [];
    // box header (8) + trun prefix (12) + 8 bytes per sample
    offset += 8 + 12 + 8 * samples.length;

    header = trunHeader(samples, offset);
    bytes = new Uint8Array(header.length + samples.length * 8);
    bytes.set(header);
    bytesOffset = header.length;

    for (i = 0; i < samples.length; i++) {
      sample = samples[i];
      bytes[bytesOffset++] = (sample.duration & 0xFF000000) >>> 24;
      bytes[bytesOffset++] = (sample.duration & 0xFF0000) >>> 16;
      bytes[bytesOffset++] = (sample.duration & 0xFF00) >>> 8;
      bytes[bytesOffset++] = sample.duration & 0xFF; // sample_duration
      bytes[bytesOffset++] = (sample.size & 0xFF000000) >>> 24;
      bytes[bytesOffset++] = (sample.size & 0xFF0000) >>> 16;
      bytes[bytesOffset++] = (sample.size & 0xFF00) >>> 8;
      bytes[bytesOffset++] = sample.size & 0xFF; // sample_size
    }

    return box(types.trun, bytes);
  };

  /**
   * Generate a track run (trun) box appropriate for the track type.
   */
  trun = function trun(track, offset) {
    if (track.type === 'audio') {
      return audioTrun(track, offset);
    }

    return videoTrun(track, offset);
  };
})();
3213
var mp4Generator = {
  ftyp: ftyp,
  mdat: mdat,
  moof: moof,
  moov: moov,
  /**
   * Build a complete initialization segment: an ftyp box followed by a
   * moov box describing every supplied track.
   * @param {Array} tracks - track definitions passed through to moov()
   * @return {Uint8Array} the concatenated init segment bytes
   */
  initSegment: function initSegment(tracks) {
    var fileType = ftyp();
    var movie = moov(tracks);
    var result = new Uint8Array(fileType.byteLength + movie.byteLength);

    result.set(fileType, 0);
    result.set(movie, fileType.byteLength);
    return result;
  }
};
3230
3231 /**
3232 * mux.js
3233 *
3234 * Copyright (c) Brightcove
3235 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
3236 */
// Convert an array of nal units into an array of frames, splitting on
// 'aud'-type (access unit delimiter) nal units. Running totals of the
// byteLength, nal count and duration are maintained on both the returned
// array and each individual frame.
var groupNalsIntoFrames = function groupNalsIntoFrames(nalUnits) {
  var frames = [];
  var currentFrame = [];
  var idx;
  var nal;

  // TODO added for LHLS, make sure this is OK
  frames.byteLength = 0;
  frames.nalCount = 0;
  frames.duration = 0;

  currentFrame.byteLength = 0;

  for (idx = 0; idx < nalUnits.length; idx++) {
    nal = nalUnits[idx];

    if (nal.nalUnitType === 'access_unit_delimiter_rbsp') {
      // an AUD marks a frame boundary; flush the frame collected so far.
      // The very first nal unit is expected to be an AUD, so the initial
      // empty frame is never pushed.
      if (currentFrame.length) {
        currentFrame.duration = nal.dts - currentFrame.dts;
        // TODO added for LHLS, make sure this is OK
        frames.byteLength += currentFrame.byteLength;
        frames.nalCount += currentFrame.length;
        frames.duration += currentFrame.duration;
        frames.push(currentFrame);
      }
      currentFrame = [nal];
      currentFrame.byteLength = nal.data.byteLength;
      currentFrame.pts = nal.pts;
      currentFrame.dts = nal.dts;
    } else {
      if (nal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
        // specifically flag key frames for ease of use later
        currentFrame.keyFrame = true;
      }
      currentFrame.duration = nal.dts - currentFrame.dts;
      currentFrame.byteLength += nal.data.byteLength;
      currentFrame.push(nal);
    }
  }

  // for the last frame, borrow the duration of the previous frame when we
  // have nothing better to go on
  if (frames.length && (!currentFrame.duration || currentFrame.duration <= 0)) {
    currentFrame.duration = frames[frames.length - 1].duration;
  }

  // push the final frame
  // TODO added for LHLS, make sure this is OK
  frames.byteLength += currentFrame.byteLength;
  frames.nalCount += currentFrame.length;
  frames.duration += currentFrame.duration;

  frames.push(currentFrame);
  return frames;
};
3299
// Convert an array of frames into an array of GOPs (groups of pictures),
// splitting on key frames. Running totals of byteLength, nalCount and
// duration plus the starting pts/dts are maintained on both the returned
// array and every GOP.
var groupFramesIntoGops = function groupFramesIntoGops(frames) {
  var gops = [];
  var currentGop = [];
  var frameIndex;
  var frame;

  // pre-seed the running totals on the initial GOP since they are
  // accumulated incrementally
  currentGop.byteLength = 0;
  currentGop.nalCount = 0;
  currentGop.duration = 0;
  currentGop.pts = frames[0].pts;
  currentGop.dts = frames[0].dts;

  // aggregate metadata about all the GOPs
  gops.byteLength = 0;
  gops.nalCount = 0;
  gops.duration = 0;
  gops.pts = frames[0].pts;
  gops.dts = frames[0].dts;

  for (frameIndex = 0; frameIndex < frames.length; frameIndex++) {
    frame = frames[frameIndex];

    if (!frame.keyFrame) {
      // accumulate non-key frames onto the current GOP
      currentGop.duration += frame.duration;
      currentGop.nalCount += frame.length;
      currentGop.byteLength += frame.byteLength;
      currentGop.push(frame);
      continue;
    }

    // a key frame begins a new GOP; flush the previous one unless it is
    // the empty placeholder created above (the first frame is expected
    // to be a key frame)
    if (currentGop.length) {
      gops.push(currentGop);
      gops.byteLength += currentGop.byteLength;
      gops.nalCount += currentGop.nalCount;
      gops.duration += currentGop.duration;
    }

    currentGop = [frame];
    currentGop.nalCount = frame.length;
    currentGop.byteLength = frame.byteLength;
    currentGop.pts = frame.pts;
    currentGop.dts = frame.dts;
    currentGop.duration = frame.duration;
  }

  // borrow the previous GOP's duration when the final one has none
  if (gops.length && currentGop.duration <= 0) {
    currentGop.duration = gops[gops.length - 1].duration;
  }
  gops.byteLength += currentGop.byteLength;
  gops.nalCount += currentGop.nalCount;
  gops.duration += currentGop.duration;

  // push the final GOP
  gops.push(currentGop);
  return gops;
};
3363
/*
 * If the first GOP does not begin with a keyframe, drop that GOP and
 * stretch the first frame of the following GOP backwards (dts, pts and
 * duration) so it covers the time range of the discarded frames. The
 * array's byteLength/nalCount totals are reduced to match.
 *
 * @param {Array} gops video GOPs
 * @returns {Array} modified video GOPs
 */
var extendFirstKeyFrame = function extendFirstKeyFrame(gops) {
  var discarded;

  if (gops.length > 1 && !gops[0][0].keyFrame) {
    // remove the leading keyframe-less GOP
    discarded = gops.shift();

    gops.byteLength -= discarded.byteLength;
    gops.nalCount -= discarded.nalCount;

    // pull the new first frame back to cover the removed time period
    gops[0][0].dts = discarded.dts;
    gops[0][0].pts = discarded.pts;
    gops[0][0].duration += discarded.duration;
  }

  return gops;
};
3393
/**
 * Build a fresh default sample object with zero size and the baseline
 * sample_flags (a dependent, non-sync sample).
 * see ISO/IEC 14496-12:2012, section 8.6.4.3
 */
var createDefaultSample = function createDefaultSample() {
  var defaultFlags = {
    isLeading: 0,
    dependsOn: 1,
    isDependedOn: 0,
    hasRedundancy: 0,
    degradationPriority: 0,
    isNonSyncSample: 1
  };

  return {
    size: 0,
    flags: defaultFlags
  };
};
3411
/*
 * Collates information from a video frame into an object for eventual
 * entry into an MP4 sample table.
 *
 * @param {Object} frame the video frame
 * @param {Number} dataOffset the byte offset to position the sample
 * @return {Object} object containing sample table info for a frame
 */
var sampleForFrame = function sampleForFrame(frame, dataOffset) {
  var sample = createDefaultSample();

  sample.dataOffset = dataOffset;
  sample.compositionTimeOffset = frame.pts - frame.dts;
  sample.duration = frame.duration;
  // 4 bytes of NAL length prefix per nal unit, plus the nal payloads
  sample.size = 4 * frame.length + frame.byteLength;

  if (frame.keyFrame) {
    // key frames depend on no other samples and are sync samples
    sample.flags.dependsOn = 2;
    sample.flags.isNonSyncSample = 0;
  }

  return sample;
};
3436
// generate the track's sample table from an array of gops, assigning each
// frame a sequential data offset starting at baseDataOffset (default 0)
var generateSampleTable = function generateSampleTable(gops, baseDataOffset) {
  var samples = [];
  var dataOffset = baseDataOffset || 0;
  var gopIndex;
  var frameIndex;
  var gop;
  var sample;

  for (gopIndex = 0; gopIndex < gops.length; gopIndex++) {
    gop = gops[gopIndex];

    for (frameIndex = 0; frameIndex < gop.length; frameIndex++) {
      sample = sampleForFrame(gop[frameIndex], dataOffset);
      dataOffset += sample.size;
      samples.push(sample);
    }
  }

  return samples;
};
3462
// generate the track's raw mdat payload from an array of gops: every NAL
// unit is written as a 32-bit big-endian length prefix followed by its bytes
var concatenateNalData = function concatenateNalData(gops) {
  var totalByteLength = gops.byteLength + 4 * gops.nalCount;
  var data = new Uint8Array(totalByteLength);
  var view = new DataView(data.buffer);
  var offset = 0;
  var gopIndex;
  var frameIndex;
  var nalIndex;
  var gop;
  var frame;
  var nal;

  for (gopIndex = 0; gopIndex < gops.length; gopIndex++) {
    gop = gops[gopIndex];

    for (frameIndex = 0; frameIndex < gop.length; frameIndex++) {
      frame = gop[frameIndex];

      for (nalIndex = 0; nalIndex < frame.length; nalIndex++) {
        nal = frame[nalIndex];

        // 4-byte length prefix, then the nal payload
        view.setUint32(offset, nal.data.byteLength);
        offset += 4;
        data.set(nal.data, offset);
        offset += nal.data.byteLength;
      }
    }
  }
  return data;
};
3499
// generate a one-entry sample table for a single frame positioned at
// baseDataOffset (default 0)
var generateSampleTableForFrame = function generateSampleTableForFrame(frame, baseDataOffset) {
  var offset = baseDataOffset || 0;

  return [sampleForFrame(frame, offset)];
};
3511
// generate the raw mdat payload for a single frame: each NAL unit is a
// 32-bit big-endian length prefix followed by its bytes
var concatenateNalDataForFrame = function concatenateNalDataForFrame(frame) {
  var totalByteLength = frame.byteLength + 4 * frame.length;
  var data = new Uint8Array(totalByteLength);
  var view = new DataView(data.buffer);
  var offset = 0;
  var nalIndex;
  var nal;

  for (nalIndex = 0; nalIndex < frame.length; nalIndex++) {
    nal = frame[nalIndex];

    // 4-byte length prefix, then the nal payload
    view.setUint32(offset, nal.data.byteLength);
    offset += 4;
    data.set(nal.data, offset);
    offset += nal.data.byteLength;
  }

  return data;
};
3535
// public helpers for converting parsed H.264 NAL units into MP4 sample
// tables and mdat payloads
var frameUtils = {
  groupNalsIntoFrames,
  groupFramesIntoGops,
  extendFirstKeyFrame,
  generateSampleTable,
  concatenateNalData,
  generateSampleTableForFrame,
  concatenateNalDataForFrame
};
3545
3546 /**
3547 * mux.js
3548 *
3549 * Copyright (c) Brightcove
3550 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
3551 */
// shared byte prefixes for the pre-generated silent AAC frames below
var highPrefix = [33, 16, 5, 32, 164, 27];
var lowPrefix = [33, 65, 108, 84, 1, 2, 4, 8, 168, 2, 4, 8, 17, 191, 252];

// build an array of `count` zeros
var zeroFill = function zeroFill(count) {
  var filled = [];
  for (var i = 0; i < count; i++) {
    filled.push(0);
  }
  return filled;
};
3561
// flatten each entry of metaTable (an object of arrays-of-arrays) into a
// single Uint8Array, returning a new object with the same keys
var makeTable = function makeTable(metaTable) {
  var table = {};

  Object.keys(metaTable).forEach(function (key) {
    var flattened = [];

    metaTable[key].forEach(function (part) {
      flattened = flattened.concat(part);
    });
    table[key] = new Uint8Array(flattened);
  });

  return table;
};
3570
// lazily-built table of silent AAC frames, keyed by sample rate
var silence;

var silence_1 = function silence_1() {
  if (silence) {
    return silence;
  }

  // frames-of-silence to use for filling in missing AAC frames
  var coneOfSilence = {
    96000: [highPrefix, [227, 64], zeroFill(154), [56]],
    88200: [highPrefix, [231], zeroFill(170), [56]],
    64000: [highPrefix, [248, 192], zeroFill(240), [56]],
    48000: [highPrefix, [255, 192], zeroFill(268), [55, 148, 128], zeroFill(54), [112]],
    44100: [highPrefix, [255, 192], zeroFill(268), [55, 163, 128], zeroFill(84), [112]],
    32000: [highPrefix, [255, 192], zeroFill(268), [55, 234], zeroFill(226), [112]],
    24000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 112], zeroFill(126), [224]],
    16000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 255], zeroFill(269), [223, 108], zeroFill(195), [1, 192]],
    12000: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 253, 128], zeroFill(259), [56]],
    11025: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 255, 192], zeroFill(268), [55, 175, 128], zeroFill(108), [112]],
    8000: [lowPrefix, zeroFill(268), [3, 121, 16], zeroFill(47), [7]]
  };

  silence = makeTable(coneOfSilence);
  return silence;
};
3593
3594 /**
3595 * mux.js
3596 *
3597 * Copyright (c) Brightcove
3598 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
3599 */
// MPEG-TS presentation timestamps tick at 90kHz
var ONE_SECOND_IN_TS = 90000;

function secondsToVideoTs(seconds) {
  return seconds * ONE_SECOND_IN_TS;
}

function secondsToAudioTs(seconds, sampleRate) {
  return seconds * sampleRate;
}

function videoTsToSeconds(timestamp) {
  return timestamp / ONE_SECOND_IN_TS;
}

function audioTsToSeconds(timestamp, sampleRate) {
  return timestamp / sampleRate;
}

function audioTsToVideoTs(timestamp, sampleRate) {
  return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
}

function videoTsToAudioTs(timestamp, sampleRate) {
  return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
}

/**
 * Adjust ID3 tag or caption timing information by the timeline pts values
 * (if keepOriginalTimestamps is false) and convert to seconds
 */
function metadataTsToSeconds(timestamp, timelineStartPts, keepOriginalTimestamps) {
  var adjusted = keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts;

  return videoTsToSeconds(adjusted);
}

var clock = {
  ONE_SECOND_IN_TS: ONE_SECOND_IN_TS,
  secondsToVideoTs: secondsToVideoTs,
  secondsToAudioTs: secondsToAudioTs,
  videoTsToSeconds: videoTsToSeconds,
  audioTsToSeconds: audioTsToSeconds,
  audioTsToVideoTs: audioTsToVideoTs,
  videoTsToAudioTs: videoTsToAudioTs,
  metadataTsToSeconds: metadataTsToSeconds
};
3653
3654 /**
3655 * mux.js
3656 *
3657 * Copyright (c) Brightcove
3658 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
3659 */
3660
/**
 * Sum the `byteLength` properties of the data in each AAC frame
 */
var sumFrameByteLengths = function sumFrameByteLengths(array) {
  return array.reduce(function (sum, frame) {
    return sum + frame.data.byteLength;
  }, 0);
};
3677
/**
 * Possibly pad (prefix) the audio track with silence if appending this track
 * would lead to the introduction of a gap in the audio buffer.
 *
 * Whole silent AAC frames (1024 samples each) are spliced onto the front of
 * `frames` and track.baseMediaDecodeTime is pulled back by the filled
 * duration. Gaps smaller than one frame or larger than half a second are
 * left alone.
 *
 * @param {Object} track - audio track; reads samplerate, reads/writes baseMediaDecodeTime
 * @param {Array} frames - parsed AAC frames, mutated in place
 * @param {Number} audioAppendStartTs - where the audio append begins (90kHz clock)
 * @param {Number} videoBaseMediaDecodeTime - the video track's baseMediaDecodeTime (90kHz clock)
 */
var prefixWithSilence = function prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime) {
  var baseMediaDecodeTimeTs,
      frameDuration = 0,
      audioGapDuration = 0,
      audioFillFrameCount = 0,
      audioFillDuration = 0,
      silentFrame,
      i,
      firstFrame;

  // nothing to pad if there is no audio
  if (!frames.length) {
    return;
  }

  baseMediaDecodeTimeTs = clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate);
  // determine frame clock duration based on sample rate, round up to avoid overfills
  frameDuration = Math.ceil(clock.ONE_SECOND_IN_TS / (track.samplerate / 1024));

  if (audioAppendStartTs && videoBaseMediaDecodeTime) {
    // insert the shortest possible amount (audio gap or audio to video gap)
    audioGapDuration = baseMediaDecodeTimeTs - Math.max(audioAppendStartTs, videoBaseMediaDecodeTime);
    // number of full frames in the audio gap
    audioFillFrameCount = Math.floor(audioGapDuration / frameDuration);
    audioFillDuration = audioFillFrameCount * frameDuration;
  }

  // don't attempt to fill gaps smaller than a single frame or larger
  // than a half second
  if (audioFillFrameCount < 1 || audioFillDuration > clock.ONE_SECOND_IN_TS / 2) {
    return;
  }

  silentFrame = silence_1()[track.samplerate];

  if (!silentFrame) {
    // we don't have a silent frame pregenerated for the sample rate, so use a frame
    // from the content instead
    silentFrame = frames[0].data;
  }

  // splice silent frames in front, each one frameDuration earlier than the
  // current first frame
  for (i = 0; i < audioFillFrameCount; i++) {
    firstFrame = frames[0];

    frames.splice(0, 0, {
      data: silentFrame,
      dts: firstFrame.dts - frameDuration,
      pts: firstFrame.pts - frameDuration
    });
  }

  // move the decode time back (in the audio clock) to cover the prepended silence
  track.baseMediaDecodeTime -= Math.floor(clock.videoTsToAudioTs(audioFillDuration, track.samplerate));
};
3732
// If the audio segment extends before the earliest allowed dts value,
// remove AAC frames until the segment starts at or after it, so the audio
// track cannot end up with a negative baseMediaDecodeTime. The track's
// minSegmentDts/minSegmentPts are recalculated from the surviving frames.
var trimAdtsFramesByEarliestDts = function trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts) {
  // nothing to trim when the segment already starts late enough
  if (track.minSegmentDts >= earliestAllowedDts) {
    return adtsFrames;
  }

  // the earliest segment dts must be recalculated below
  track.minSegmentDts = Infinity;

  return adtsFrames.filter(function (frame) {
    if (frame.dts < earliestAllowedDts) {
      // too early: discard
      return false;
    }
    // keep the frame and record its dts
    track.minSegmentDts = Math.min(track.minSegmentDts, frame.dts);
    track.minSegmentPts = track.minSegmentDts;
    return true;
  });
};
3756
// generate the track's sample table from an array of AAC frames
// (note: the original comment said "raw mdat data", which describes
// concatenateFrameData below, not this function)
var generateSampleTable$1 = function generateSampleTable(frames) {
  return frames.map(function (frame) {
    return {
      size: frame.data.byteLength,
      duration: 1024 // For AAC audio, all samples contain 1024 samples
    };
  });
};
3772
// concatenate the data of all AAC frames into a single Uint8Array (the
// track's raw mdat payload)
var concatenateFrameData = function concatenateFrameData(frames) {
  var data = new Uint8Array(sumFrameByteLengths(frames));
  var offset = 0;

  frames.forEach(function (frame) {
    data.set(frame.data, offset);
    offset += frame.data.byteLength;
  });

  return data;
};
3788
// public helpers for converting parsed AAC frames into MP4 sample tables
// and mdat payloads
var audioFrameUtils = {
  prefixWithSilence,
  trimAdtsFramesByEarliestDts,
  generateSampleTable: generateSampleTable$1,
  concatenateFrameData
};
3795
3796 /**
3797 * mux.js
3798 *
3799 * Copyright (c) Brightcove
3800 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
3801 */
// local alias for mux.js's 90kHz timestamp clock rate
var ONE_SECOND_IN_TS$1 = clock.ONE_SECOND_IN_TS;
3803
/**
 * Store information about the start and end of the track and the
 * duration for each frame/sample we process in order to calculate
 * the baseMediaDecodeTime later.
 *
 * Records the first pts/dts seen on track.timelineStartInfo and keeps
 * running min/max segment pts/dts values on the track. Non-numeric
 * timestamps are ignored.
 */
var collectDtsInfo = function collectDtsInfo(track, data) {
  if (typeof data.pts === 'number') {
    if (track.timelineStartInfo.pts === undefined) {
      track.timelineStartInfo.pts = data.pts;
    }

    track.minSegmentPts = track.minSegmentPts === undefined ? data.pts : Math.min(track.minSegmentPts, data.pts);
    track.maxSegmentPts = track.maxSegmentPts === undefined ? data.pts : Math.max(track.maxSegmentPts, data.pts);
  }

  if (typeof data.dts === 'number') {
    if (track.timelineStartInfo.dts === undefined) {
      track.timelineStartInfo.dts = data.dts;
    }

    track.minSegmentDts = track.minSegmentDts === undefined ? data.dts : Math.min(track.minSegmentDts, data.dts);
    track.maxSegmentDts = track.maxSegmentDts === undefined ? data.dts : Math.max(track.maxSegmentDts, data.dts);
  }
};
3846
/**
 * Clear values used to calculate the baseMediaDecodeTime between
 * tracks
 */
var clearDtsInfo = function clearDtsInfo(track) {
  ['minSegmentDts', 'maxSegmentDts', 'minSegmentPts', 'maxSegmentPts'].forEach(function (prop) {
    delete track[prop];
  });
};
3857
/**
 * Calculate the track's baseMediaDecodeTime based on the earliest
 * DTS the transmuxer has ever seen and the minimum DTS for the
 * current track. The result is clamped at zero and, for audio tracks,
 * rescaled from the 90kHz clock into the track's sampling rate.
 *
 * @param track {object} track metadata configuration
 * @param keepOriginalTimestamps {boolean} If true, keep the timestamps
 *        in the source; false to adjust the first segment to start at 0.
 */
var calculateTrackBaseMediaDecodeTime = function calculateTrackBaseMediaDecodeTime(track, keepOriginalTimestamps) {
  var minSegmentDts = track.minSegmentDts;

  if (!keepOriginalTimestamps) {
    // shift so the very first segment starts at zero
    minSegmentDts -= track.timelineStartInfo.dts;
  }

  // start from where the first segment should be placed
  // (timelineStartInfo.baseMediaDecodeTime), advance by this segment's
  // distance from the very first one, and never go negative
  var baseMediaDecodeTime = Math.max(0, track.timelineStartInfo.baseMediaDecodeTime + minSegmentDts);

  if (track.type === 'audio') {
    // audio uses a clock equal to its sampling rate rather than the
    // 90kHz video clock, so rescale and truncate
    baseMediaDecodeTime *= track.samplerate / ONE_SECOND_IN_TS$1;
    baseMediaDecodeTime = Math.floor(baseMediaDecodeTime);
  }

  return baseMediaDecodeTime;
};
3896
// Public surface for the per-track decode-time bookkeeping helpers above:
// collectDtsInfo accumulates per-sample timing, clearDtsInfo resets it
// between segments, and calculateTrackBaseMediaDecodeTime derives the MP4
// baseMediaDecodeTime from the accumulated values.
var trackDecodeInfo = {
  clearDtsInfo: clearDtsInfo,
  calculateTrackBaseMediaDecodeTime: calculateTrackBaseMediaDecodeTime,
  collectDtsInfo: collectDtsInfo
};
3902
3903 /**
3904 * mux.js
3905 *
3906 * Copyright (c) Brightcove
3907 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
3908 *
3909 * Reads in-band caption information from a video elementary
3910 * stream. Captions must follow the CEA-708 standard for injection
3911 * into an MPEG-2 transport streams.
3912 * @see https://en.wikipedia.org/wiki/CEA-708
3913 * @see https://www.gpo.gov/fdsys/pkg/CFR-2007-title47-vol1/pdf/CFR-2007-title47-vol1-sec15-119.pdf
3914 */
3915
3916 // Supplemental enhancement information (SEI) NAL units have a
3917 // payload type field to indicate how they are to be
3918 // interpreted. CEAS-708 caption content is always transmitted with
3919 // payload type 0x04.
3920
// SEI payload type for user_data_registered_itu_t_t35; CEA-708 caption
// content is always transmitted with this payload type (0x04).
var USER_DATA_REGISTERED_ITU_T_T35 = 4;

// rbsp_trailing_bits marker that terminates the sei_rbsp
var RBSP_TRAILING_BITS = 128;

/**
 * Parse a supplemental enhancement information (SEI) NAL unit.
 * Stops parsing once a message of type ITU T T35 has been found.
 *
 * @param {Uint8Array} bytes - the bytes of a SEI NAL unit
 * @return {Object} the parsed SEI payload; `payloadType` is -1 and `payload`
 *         is absent when no ITU T T35 message was found
 * @see Rec. ITU-T H.264, 7.3.2.3.1
 */
var parseSei = function parseSei(bytes) {
  var result = {
    payloadType: -1,
    payloadSize: 0
  };
  var i = 0;

  // walk the sei_rbsp, decoding one sei_message per iteration
  while (i < bytes.byteLength) {
    // stop once we have hit the end of the sei_rbsp
    if (bytes[i] === RBSP_TRAILING_BITS) {
      break;
    }

    // payload type: sum of 0xFF prefix bytes plus the terminating byte
    var payloadType = 0;
    while (bytes[i] === 0xFF) {
      payloadType += 255;
      i++;
    }
    payloadType += bytes[i++];

    // payload size: same variable-length encoding as the type
    var payloadSize = 0;
    while (bytes[i] === 0xFF) {
      payloadSize += 255;
      i++;
    }
    payloadSize += bytes[i++];

    // a 608/708 caption message: save it and stop — there can only ever be
    // one caption message in a frame's SEI
    if (!result.payload && payloadType === USER_DATA_REGISTERED_ITU_T_T35) {
      result.payloadType = payloadType;
      result.payloadSize = payloadSize;
      result.payload = bytes.subarray(i, i + payloadSize);
      break;
    }

    // not a caption message; skip its payload and keep scanning
    i += payloadSize;
  }

  return result;
};
3979
3980 // see ANSI/SCTE 128-1 (2013), section 8.1
/**
 * Extract the ATSC user_data_type_structure from a parsed SEI message.
 * see ANSI/SCTE 128-1 (2013), section 8.1
 *
 * @param {Object} sei - result of `parseSei`, with a `payload` byte array
 * @return {Uint8Array|null} the caption bytes (trailing marker bits
 *         stripped), or null when the payload is not ATSC1 caption data
 */
var parseUserData = function parseUserData(sei) {
  var payload = sei.payload;

  // All four checks must hold for caption data:
  //  - itu_t_t35_country_code is 181 (United States)
  //  - itu_t_t35_provider_code is 49 (ATSC)
  //  - user_identifier is "GA94" (ATSC1 data)
  //  - user_data_type_code is 0x03 (caption data)
  var isCaptionData = payload[0] === 181 &&
    ((payload[1] << 8) | payload[2]) === 49 &&
    String.fromCharCode(payload[3], payload[4], payload[5], payload[6]) === 'GA94' &&
    payload[7] === 0x03;

  if (!isCaptionData) {
    return null;
  }

  // return the user_data_type_structure and strip the trailing marker bits
  return payload.subarray(8, payload.length - 1);
};
4007
4008 // see CEA-708-D, section 4.4
/**
 * Split an ATSC user_data_type_structure into individual caption packets.
 * see CEA-708-D, section 4.4
 *
 * @param {number} pts - presentation timestamp to stamp onto each packet
 * @param {Uint8Array} userData - caption bytes from `parseUserData`
 * @return {Array} packets of the form {type, pts, ccData}; empty when the
 *         data is filler or no cc pair has its cc_valid bit set
 */
var parseCaptionPackets = function parseCaptionPackets(pts, userData) {
  var results = [];

  // process_cc_data_flag unset means this is just filler
  if (!(userData[0] & 0x40)) {
    return results;
  }

  // cc_count lives in the low 5 bits; each construct is 3 bytes
  var count = userData[0] & 0x1f;
  for (var i = 0; i < count; i++) {
    var offset = i * 3;

    // capture the cc_data_1/cc_data_2 pair only when cc_valid is 1
    if (userData[offset + 2] & 0x04) {
      results.push({
        type: userData[offset + 2] & 0x03,
        pts: pts,
        ccData: (userData[offset + 3] << 8) | userData[offset + 4]
      });
    }
  }
  return results;
};
4038
/**
 * Remove H.264 emulation prevention bytes (the 0x03 inserted after every
 * 0x00 0x00 pair) from NAL unit data.
 *
 * @param {Uint8Array} data - escaped RBSP bytes
 * @return {Uint8Array} the unescaped bytes; the ORIGINAL array is returned
 *         unchanged when no emulation prevention bytes are present
 */
var discardEmulationPreventionBytes = function discardEmulationPreventionBytes(data) {
  var length = data.byteLength;
  var positions = [];
  var i = 1;

  // locate every 0x03 that immediately follows a 0x00 0x00 pair
  while (i < length - 2) {
    if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
      positions.push(i + 2);
      i += 2;
    } else {
      i++;
    }
  }

  // nothing to strip: hand back the original array untouched
  if (positions.length === 0) {
    return data;
  }

  // copy everything except the recorded positions into a fresh array
  var newData = new Uint8Array(length - positions.length);
  var sourceIndex = 0;

  for (var target = 0; target < newData.length; sourceIndex++, target++) {
    if (sourceIndex === positions[0]) {
      // skip the emulation prevention byte and retire its position entry
      sourceIndex++;
      positions.shift();
    }
    newData[target] = data[sourceIndex];
  }

  return newData;
};
4079
4080 // exports
// Public surface for the stateless SEI/caption-packet parsing helpers above.
// USER_DATA_REGISTERED_ITU_T_T35 is re-exported so callers can test SEI
// payload types without repeating the magic number.
var captionPacketParser = {
  parseSei: parseSei,
  parseUserData: parseUserData,
  parseCaptionPackets: parseCaptionPackets,
  discardEmulationPreventionBytes: discardEmulationPreventionBytes,
  USER_DATA_REGISTERED_ITU_T_T35: USER_DATA_REGISTERED_ITU_T_T35
};
4088
4089 // -----------------
4090 // Link To Transport
4091 // -----------------
4092
4093
/**
 * Stream that extracts CEA-608 caption packets from SEI NAL units and
 * dispatches them to one Cea608Stream per (field, data channel) pair
 * (CC1 through CC4), re-emitting the child streams' events.
 */
var CaptionStream = function CaptionStream() {
  var self = this;

  CaptionStream.prototype.init.call(this);

  this.captionPackets_ = [];

  // one Cea608Stream for each of CC1..CC4: (field, dataChannel) pairs
  this.ccStreams_ = [];
  [[0, 0], [0, 1], [1, 0], [1, 1]].forEach(function (pair) {
    self.ccStreams_.push(new Cea608Stream(pair[0], pair[1])); // eslint-disable-line no-use-before-define
  });

  this.reset();

  // forward data and done events from CCs to this CaptionStream
  this.ccStreams_.forEach(function (cc) {
    cc.on('data', self.trigger.bind(self, 'data'));
    cc.on('partialdone', self.trigger.bind(self, 'partialdone'));
    cc.on('done', self.trigger.bind(self, 'done'));
  });
};
4115
CaptionStream.prototype = new stream();
/**
 * Process one NAL-unit event. Only 'sei_rbsp' NAL units are examined; any
 * ITU T T35 caption payload they carry is parsed into CC packets and
 * buffered on `captionPackets_` until the next flush.
 *
 * Duplicate-segment protection: when the same segment is downloaded twice,
 * packets arrive again with dts values at or before `latestDts_`; those are
 * dropped (see the counter logic below).
 *
 * @param {Object} event - NAL unit with nalUnitType, escapedRBSP, pts, dts
 */
CaptionStream.prototype.push = function (event) {
  var sei, userData, newCaptionPackets;

  // only examine SEI NALs
  if (event.nalUnitType !== 'sei_rbsp') {
    return;
  }

  // parse the sei
  sei = captionPacketParser.parseSei(event.escapedRBSP);

  // ignore everything but user_data_registered_itu_t_t35
  if (sei.payloadType !== captionPacketParser.USER_DATA_REGISTERED_ITU_T_T35) {
    return;
  }

  // parse out the user data payload
  userData = captionPacketParser.parseUserData(sei);

  // ignore unrecognized userData
  if (!userData) {
    return;
  }

  // Sometimes, the same segment # will be downloaded twice. To stop the
  // caption data from being processed twice, we track the latest dts we've
  // received and ignore everything with a dts before that. However, since
  // data for a specific dts can be split across packets on either side of
  // a segment boundary, we need to make sure we *don't* ignore the packets
  // from the *next* segment that have dts === this.latestDts_. By constantly
  // tracking the number of packets received with dts === this.latestDts_, we
  // know how many should be ignored once we start receiving duplicates.
  if (event.dts < this.latestDts_) {
    // We've started getting older data, so set the flag.
    this.ignoreNextEqualDts_ = true;
    return;
  } else if (event.dts === this.latestDts_ && this.ignoreNextEqualDts_) {
    // count down the known number of equal-dts packets to skip
    this.numSameDts_--;
    if (!this.numSameDts_) {
      // We've received the last duplicate packet, time to start processing again
      this.ignoreNextEqualDts_ = false;
    }
    return;
  }

  // parse out CC data packets and save them for later
  newCaptionPackets = captionPacketParser.parseCaptionPackets(event.pts, userData);
  this.captionPackets_ = this.captionPackets_.concat(newCaptionPackets);
  // track how many consecutive pushes share this dts so duplicates of this
  // boundary can be skipped exactly once on a re-download
  if (this.latestDts_ !== event.dts) {
    this.numSameDts_ = 0;
  }
  this.numSameDts_++;
  this.latestDts_ = event.dts;
};
4171
/**
 * Flush every child Cea608Stream. A 'flush' finalizes all buffered caption
 * data; any other flushType performs a partial flush.
 *
 * @param {string} flushType - 'flush' or 'partialFlush'
 */
CaptionStream.prototype.flushCCStreams = function (flushType) {
  for (var i = 0; i < this.ccStreams_.length; i++) {
    var cc = this.ccStreams_[i];
    if (flushType === 'flush') {
      cc.flush();
    } else {
      cc.partialFlush();
    }
  }
};
4177
/**
 * Sort and dispatch all buffered caption packets to the CEA-608 streams,
 * then flush those streams.
 *
 * @param {string} flushType - 'flush' or 'partialFlush'
 */
CaptionStream.prototype.flushStream = function (flushType) {
  // nothing buffered: still propagate the flush downstream
  if (!this.captionPackets_.length) {
    this.flushCCStreams(flushType);
    return;
  }

  var self = this;

  // Chrome's Array#sort is not stable, so tag each packet with its arrival
  // index and use it as a tiebreaker for equal PTS values
  this.captionPackets_.forEach(function (packet, index) {
    packet.presortIndex = index;
  });

  // order caption byte-pairs by PTS, arrival order breaking ties
  this.captionPackets_.sort(function (left, right) {
    return left.pts === right.pts ? left.presortIndex - right.presortIndex : left.pts - right.pts;
  });

  this.captionPackets_.forEach(function (packet) {
    // types 0 and 1 are the CEA-608 fields; an 'else' here would dispatch
    // to a theoretical Cea708Stream handling SERVICEn data
    if (packet.type < 2) {
      self.dispatchCea608Packet(packet);
    }
  });

  this.captionPackets_.length = 0;
  this.flushCCStreams(flushType);
};
4211
// flush() finalizes everything buffered; partialFlush() is only called when
// handling partial segment data. Both simply forward their own name as the
// flushType to flushStream.
['flush', 'partialFlush'].forEach(function (flushType) {
  CaptionStream.prototype[flushType] = function () {
    return this.flushStream(flushType);
  };
});
4220
/**
 * Reset all duplicate-segment bookkeeping, channel selection, and child
 * stream state; used at startup and on seek/discontinuity.
 */
CaptionStream.prototype.reset = function () {
  // dts bookkeeping used by push() to drop re-downloaded segment data
  this.latestDts_ = null;
  this.ignoreNextEqualDts_ = false;
  this.numSameDts_ = 0;

  // no channel is active on either field until a control code selects one
  this.activeCea608Channel_ = [null, null];

  for (var i = 0; i < this.ccStreams_.length; i++) {
    this.ccStreams_[i].reset();
  }
};
4230
4231 // From the CEA-608 spec:
4232 /*
4233 * When XDS sub-packets are interleaved with other services, the end of each sub-packet shall be followed
4234 * by a control pair to change to a different service. When any of the control codes from 0x10 to 0x1F is
4235 * used to begin a control code pair, it indicates the return to captioning or Text data. The control code pair
4236 * and subsequent data should then be processed according to the FCC rules. It may be necessary for the
4237 * line 21 data encoder to automatically insert a control code pair (i.e. RCL, RU2, RU3, RU4, RDC, or RTD)
4238 * to switch to captioning or Text.
4239 */
4240 // With that in mind, we ignore any data between an XDS control code and a
4241 // subsequent closed-captioning control code.
/**
 * Route a CEA-608 packet to the Cea608Stream for its field's currently
 * active channel, updating the active channel when the packet carries a
 * channel-selecting (or Text/XDS-deactivating) control code.
 *
 * @param {Object} packet - caption packet; packet.type is the CEA608 field
 */
CaptionStream.prototype.dispatchCea608Packet = function (packet) {
  var field = packet.type;
  var channel = this.activeCea608Channel_[field];

  if (this.setsTextOrXDSActive(packet)) {
    // Text/XDS data deactivates captioning until the next channel code
    channel = null;
  } else if (this.setsChannel1Active(packet)) {
    channel = 0;
  } else if (this.setsChannel2Active(packet)) {
    channel = 1;
  }
  this.activeCea608Channel_[field] = channel;

  if (channel === null) {
    // No channel has been activated yet, or the packets are Text/XDS data;
    // discard rather than risk jumbled captions.
    return;
  }

  this.ccStreams_[(field << 1) + channel].push(packet);
};
4259
// Channel-selection predicates: control codes carry their data channel in
// bits masked by 0x7800 (0x1000 -> channel 1, 0x1800 -> channel 2).
CaptionStream.prototype.setsChannel1Active = function (packet) {
  var masked = packet.ccData & 0x7800;
  return masked === 0x1000;
};
CaptionStream.prototype.setsChannel2Active = function (packet) {
  var masked = packet.ccData & 0x7800;
  return masked === 0x1800;
};
// XDS control codes (0x01xx range) or the "switch to Text" codes (0x102a /
// 0x182a) deactivate caption decoding on the field.
CaptionStream.prototype.setsTextOrXDSActive = function (packet) {
  var cc = packet.ccData;
  return (cc & 0x7100) === 0x0100 || (cc & 0x78fe) === 0x102a || (cc & 0x78fe) === 0x182a;
};
4269
4270 // ----------------------
4271 // Session to Application
4272 // ----------------------
4273
4274 // This hash maps non-ASCII, special, and extended character codes to their
4275 // proper Unicode equivalent. The first keys that are only a single byte
4276 // are the non-standard ASCII characters, which simply map the CEA608 byte
4277 // to the standard ASCII/Unicode. The two-byte keys that follow are the CEA608
4278 // character codes, but have their MSB bitmasked with 0x03 so that a lookup
4279 // can be performed regardless of the field and data channel on which the
4280 // character code was received.
// Lookup table: CEA-608 character code -> Unicode code point, consumed by
// getCharFromCode via String.fromCharCode. Codes absent from this table are
// passed through unchanged.
var CHARACTER_TRANSLATION = {
  0x2a: 0xe1, // á
  0x5c: 0xe9, // é
  0x5e: 0xed, // í
  0x5f: 0xf3, // ó
  0x60: 0xfa, // ú
  0x7b: 0xe7, // ç
  0x7c: 0xf7, // ÷
  0x7d: 0xd1, // Ñ
  0x7e: 0xf1, // ñ
  0x7f: 0x2588, // █
  0x0130: 0xae, // ®
  0x0131: 0xb0, // °
  0x0132: 0xbd, // ½
  0x0133: 0xbf, // ¿
  0x0134: 0x2122, // ™
  0x0135: 0xa2, // ¢
  0x0136: 0xa3, // £
  0x0137: 0x266a, // ♪
  0x0138: 0xe0, // à
  0x0139: 0xa0, // non-breaking space
  0x013a: 0xe8, // è
  0x013b: 0xe2, // â
  0x013c: 0xea, // ê
  0x013d: 0xee, // î
  0x013e: 0xf4, // ô
  0x013f: 0xfb, // û
  0x0220: 0xc1, // Á
  0x0221: 0xc9, // É
  0x0222: 0xd3, // Ó
  0x0223: 0xda, // Ú
  0x0224: 0xdc, // Ü
  0x0225: 0xfc, // ü
  0x0226: 0x2018, // ‘
  0x0227: 0xa1, // ¡
  0x0228: 0x2a, // *
  0x0229: 0x27, // '
  0x022a: 0x2014, // —
  0x022b: 0xa9, // ©
  0x022c: 0x2120, // ℠
  0x022d: 0x2022, // •
  0x022e: 0x201c, // “
  0x022f: 0x201d, // ”
  0x0230: 0xc0, // À
  0x0231: 0xc2, // Â
  0x0232: 0xc7, // Ç
  0x0233: 0xc8, // È
  0x0234: 0xca, // Ê
  0x0235: 0xcb, // Ë
  0x0236: 0xeb, // ë
  0x0237: 0xce, // Î
  0x0238: 0xcf, // Ï
  0x0239: 0xef, // ï
  0x023a: 0xd4, // Ô
  0x023b: 0xd9, // Ù
  0x023c: 0xf9, // ù
  0x023d: 0xdb, // Û
  0x023e: 0xab, // «
  0x023f: 0xbb, // »
  0x0320: 0xc3, // Ã
  0x0321: 0xe3, // ã
  0x0322: 0xcd, // Í
  0x0323: 0xcc, // Ì
  0x0324: 0xec, // ì
  0x0325: 0xd2, // Ò
  0x0326: 0xf2, // ò
  0x0327: 0xd5, // Õ
  0x0328: 0xf5, // õ
  0x0329: 0x7b, // {
  0x032a: 0x7d, // }
  0x032b: 0x5c, // \
  0x032c: 0x5e, // ^
  0x032d: 0x5f, // _
  0x032e: 0x7c, // |
  0x032f: 0x7e, // ~
  0x0330: 0xc4, // Ä
  0x0331: 0xe4, // ä
  0x0332: 0xd6, // Ö
  0x0333: 0xf6, // ö
  0x0334: 0xdf, // ß
  0x0335: 0xa5, // ¥
  0x0336: 0xa4, // ¤
  0x0337: 0x2502, // │
  0x0338: 0xc5, // Å
  0x0339: 0xe5, // å
  0x033a: 0xd8, // Ø
  0x033b: 0xf8, // ø
  0x033c: 0x250c, // ┌
  0x033d: 0x2510, // ┐
  0x033e: 0x2514, // └
  0x033f: 0x2518 // ┘
};
4373
/**
 * Convert a CEA-608 character code to a one-character string, applying the
 * CHARACTER_TRANSLATION mapping when one exists.
 *
 * @param {number|null} code - CEA-608 character code; null yields ''
 * @return {string} the translated character (or empty string for null)
 */
var getCharFromCode = function getCharFromCode(code) {
  if (code === null) {
    return '';
  }
  // prefer the CEA-608 -> Unicode mapping; fall back to the raw code point
  var translated = CHARACTER_TRANSLATION[code] || code;
  return String.fromCharCode(translated);
};
4381
// the index of the last row in a CEA-608 display buffer
var BOTTOM_ROW = 14;

// Maps PAC row codes -> row #. There is no bit logic that recovers the row
// from the code, so we look the code up in this array and use its index.
var ROWS = [0x1100, 0x1120, 0x1200, 0x1220, 0x1500, 0x1520, 0x1600, 0x1620, 0x1700, 0x1720, 0x1000, 0x1300, 0x1320, 0x1400, 0x1420];

/**
 * Build an empty CEA-608 display buffer: one empty string per row of the
 * 34x15 character matrix (the "bottom" row is the last element).
 *
 * @return {string[]} array of BOTTOM_ROW + 1 empty strings
 */
var createDisplayBuffer = function createDisplayBuffer() {
  var rows = [];
  for (var i = 0; i <= BOTTOM_ROW; i++) {
    rows.push('');
  }
  return rows;
};
4399
/**
 * Decoder for a single CEA-608 caption service (one field/data-channel
 * pair, i.e. one of CC1..CC4). Two-byte caption packets are pushed in and
 * decoded text cues are emitted via 'data' events (see flushDisplayed).
 *
 * @param {number} field - 608 field, 0 or 1 (defaults to 0)
 * @param {number} dataChannel - data channel within the field, 0 or 1
 *        (defaults to 0)
 */
var Cea608Stream = function Cea608Stream(field, dataChannel) {
  Cea608Stream.prototype.init.call(this);

  this.field_ = field || 0;
  this.dataChannel_ = dataChannel || 0;

  // service name CC1..CC4, derived from (field, dataChannel)
  this.name_ = 'CC' + ((this.field_ << 1 | this.dataChannel_) + 1);

  this.setConstants();
  this.reset();

  /**
   * Process one caption packet: either a control code (mode switches,
   * erasures, roll-up, formatting, cursor moves) or one/two printable
   * characters appended to the current caption text.
   * @param {Object} packet - has `ccData` (16-bit pair) and `pts`
   */
  this.push = function (packet) {
    var data, swap, char0, char1, text;
    // remove the parity bits
    data = packet.ccData & 0x7f7f;

    // ignore duplicate control codes; the spec demands they're sent twice
    if (data === this.lastControlCode_) {
      this.lastControlCode_ = null;
      return;
    }

    // Store control codes (0x1000-0x1fff range) so the duplicate check
    // above can recognize the mandated retransmission
    if ((data & 0xf000) === 0x1000) {
      this.lastControlCode_ = data;
    } else if (data !== this.PADDING_) {
      this.lastControlCode_ = null;
    }

    char0 = data >>> 8;
    char1 = data & 0xff;

    if (data === this.PADDING_) {
      return;
    } else if (data === this.RESUME_CAPTION_LOADING_) {
      this.mode_ = 'popOn';
    } else if (data === this.END_OF_CAPTION_) {
      // If an EOC is received while in paint-on mode, the displayed caption
      // text should be swapped to non-displayed memory as if it was a pop-on
      // caption. Because of that, we should explicitly switch back to pop-on
      // mode
      this.mode_ = 'popOn';
      this.clearFormatting(packet.pts);
      // if a caption was being displayed, it's gone now
      this.flushDisplayed(packet.pts);

      // flip memory
      swap = this.displayed_;
      this.displayed_ = this.nonDisplayed_;
      this.nonDisplayed_ = swap;

      // start measuring the time to display the caption
      this.startPts_ = packet.pts;
    } else if (data === this.ROLL_UP_2_ROWS_) {
      this.rollUpRows_ = 2;
      this.setRollUp(packet.pts);
    } else if (data === this.ROLL_UP_3_ROWS_) {
      this.rollUpRows_ = 3;
      this.setRollUp(packet.pts);
    } else if (data === this.ROLL_UP_4_ROWS_) {
      this.rollUpRows_ = 4;
      this.setRollUp(packet.pts);
    } else if (data === this.CARRIAGE_RETURN_) {
      // roll-up line break: emit the current window, then scroll it
      this.clearFormatting(packet.pts);
      this.flushDisplayed(packet.pts);
      this.shiftRowsUp_();
      this.startPts_ = packet.pts;
    } else if (data === this.BACKSPACE_) {
      if (this.mode_ === 'popOn') {
        this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
      } else {
        this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
      }
    } else if (data === this.ERASE_DISPLAYED_MEMORY_) {
      this.flushDisplayed(packet.pts);
      this.displayed_ = createDisplayBuffer();
    } else if (data === this.ERASE_NON_DISPLAYED_MEMORY_) {
      this.nonDisplayed_ = createDisplayBuffer();
    } else if (data === this.RESUME_DIRECT_CAPTIONING_) {
      if (this.mode_ !== 'paintOn') {
        // NOTE: This should be removed when proper caption positioning is
        // implemented
        this.flushDisplayed(packet.pts);
        this.displayed_ = createDisplayBuffer();
      }
      this.mode_ = 'paintOn';
      this.startPts_ = packet.pts;

      // Append special characters to caption text
    } else if (this.isSpecialCharacter(char0, char1)) {
      // Bitmask char0 so that we can apply character transformations
      // regardless of field and data channel.
      // Then byte-shift to the left and OR with char1 so we can pass the
      // entire character code to `getCharFromCode`.
      char0 = (char0 & 0x03) << 8;
      text = getCharFromCode(char0 | char1);
      // this[this.mode_] is popOn/rollUp/paintOn: append text in that mode
      this[this.mode_](packet.pts, text);
      this.column_++;

      // Append extended characters to caption text
    } else if (this.isExtCharacter(char0, char1)) {
      // Extended characters always follow their "non-extended" equivalents.
      // IE if a "è" is desired, you'll always receive "eè"; non-compliant
      // decoders are supposed to drop the "è", while compliant decoders
      // backspace the "e" and insert "è".

      // Delete the previous character
      if (this.mode_ === 'popOn') {
        this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
      } else {
        this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
      }

      // Bitmask char0 so that we can apply character transformations
      // regardless of field and data channel.
      // Then byte-shift to the left and OR with char1 so we can pass the
      // entire character code to `getCharFromCode`.
      char0 = (char0 & 0x03) << 8;
      text = getCharFromCode(char0 | char1);
      this[this.mode_](packet.pts, text);
      this.column_++;

      // Process mid-row codes
    } else if (this.isMidRowCode(char0, char1)) {
      // Attributes are not additive, so clear all formatting
      this.clearFormatting(packet.pts);

      // According to the standard, mid-row codes
      // should be replaced with spaces, so add one now
      this[this.mode_](packet.pts, ' ');
      this.column_++;

      if ((char1 & 0xe) === 0xe) {
        this.addFormatting(packet.pts, ['i']);
      }

      if ((char1 & 0x1) === 0x1) {
        this.addFormatting(packet.pts, ['u']);
      }

      // Detect offset control codes and adjust cursor
    } else if (this.isOffsetControlCode(char0, char1)) {
      // Cursor position is set by indent PAC (see below) in 4-column
      // increments, with an additional offset code of 1-3 to reach any
      // of the 32 columns specified by CEA-608. So all we need to do
      // here is increment the column cursor by the given offset.
      this.column_ += char1 & 0x03;

      // Detect PACs (Preamble Address Codes)
    } else if (this.isPAC(char0, char1)) {

      // There's no logic for PAC -> row mapping, so we have to just
      // find the row code in an array and use its index :(
      var row = ROWS.indexOf(data & 0x1f20);

      // Configure the caption window if we're in roll-up mode
      if (this.mode_ === 'rollUp') {
        // This implies that the base row is incorrectly set.
        // As per the recommendation in CEA-608(Base Row Implementation), defer to the number
        // of roll-up rows set.
        if (row - this.rollUpRows_ + 1 < 0) {
          row = this.rollUpRows_ - 1;
        }

        this.setRollUp(packet.pts, row);
      }

      if (row !== this.row_) {
        // formatting is only persistent for current row
        this.clearFormatting(packet.pts);
        this.row_ = row;
      }
      // All PACs can apply underline, so detect and apply
      // (All odd-numbered second bytes set underline)
      if (char1 & 0x1 && this.formatting_.indexOf('u') === -1) {
        this.addFormatting(packet.pts, ['u']);
      }

      if ((data & 0x10) === 0x10) {
        // We've got an indent level code. Each successive even number
        // increments the column cursor by 4, so we can get the desired
        // column position by bit-shifting to the right (to get n/2)
        // and multiplying by 4.
        this.column_ = ((data & 0xe) >> 1) * 4;
      }

      if (this.isColorPAC(char1)) {
        // it's a color code, though we only support white, which
        // can be either normal or italicized. white italics can be
        // either 0x4e or 0x6e depending on the row, so we just
        // bitwise-and with 0xe to see if italics should be turned on
        if ((char1 & 0xe) === 0xe) {
          this.addFormatting(packet.pts, ['i']);
        }
      }

      // We have a normal character in char0, and possibly one in char1
    } else if (this.isNormalChar(char0)) {
      if (char1 === 0x00) {
        char1 = null;
      }
      text = getCharFromCode(char0);
      text += getCharFromCode(char1);
      this[this.mode_](packet.pts, text);
      this.column_ += text.length;
    } // finish data processing
  };
};
Cea608Stream.prototype = new stream();
/**
 * Trigger a cue point ('data' event) that captures the current state of the
 * display buffer: rows trimmed, joined with newlines, with leading/trailing
 * blank rows dropped. No event is emitted when the buffer is empty.
 *
 * @param {number} pts - end timestamp for the cue
 */
Cea608Stream.prototype.flushDisplayed = function (pts) {
  var rows = [];

  for (var i = 0; i < this.displayed_.length; i++) {
    var trimmed;
    try {
      // remove spaces from the start and end of the string
      trimmed = this.displayed_[i].trim();
    } catch (e) {
      // Ordinarily, this shouldn't happen. However, caption
      // parsing errors should not throw exceptions and
      // break playback.
      // eslint-disable-next-line no-console
      console.error('Skipping malformed caption.');
      trimmed = '';
    }
    rows.push(trimmed);
  }

  // combine rows into one cue, removing blank rows from the start and end
  // but not the middle
  var content = rows.join('\n').replace(/^\n+|\n+$/g, '');

  if (content.length) {
    this.trigger('data', {
      startPts: this.startPts_,
      endPts: pts,
      text: content,
      stream: this.name_
    });
  }
};
4640
4641 /**
4642 * Zero out the data, used for startup and on seek
4643 */
/**
 * Zero out all decoder state; used for startup and on seek.
 */
Cea608Stream.prototype.reset = function () {
  this.mode_ = 'popOn';

  // In roll-up mode, topRow_ is the lowest-index row that still displays
  // captions; anything shifted above it is cleared from the display buffer.
  this.topRow_ = 0;
  this.startPts_ = 0;
  this.displayed_ = createDisplayBuffer();
  this.nonDisplayed_ = createDisplayBuffer();
  this.lastControlCode_ = null;

  // cursor state used for proper line-breaking and spacing
  this.column_ = 0;
  this.row_ = BOTTOM_ROW;
  this.rollUpRows_ = 2;

  // currently-applied formatting tags (e.g. 'i', 'u')
  this.formatting_ = [];
};
4664
4665 /**
4666 * Sets up control code and related constants for this instance
4667 */
/**
 * Sets up control code and related constants for this instance.
 *
 * BASE_ is char0 for PACs, EXT_ the base char0 for mid-row/special/extended
 * codes, OFFSET_ char0 for tab offset codes, and CONTROL_ is char0 for
 * control codes pre-shifted left so codes can be built as
 * `this.CONTROL_ | LSBYTE`. Control codes — and only control codes — differ
 * between fields: field 2 is always the field 1 value plus one, hence the
 * `| this.field_`.
 */
Cea608Stream.prototype.setConstants = function () {
  if (this.dataChannel_ === 0) {
    this.BASE_ = 0x10;
    this.EXT_ = 0x11;
    this.CONTROL_ = (0x14 | this.field_) << 8;
    this.OFFSET_ = 0x17;
  } else if (this.dataChannel_ === 1) {
    this.BASE_ = 0x18;
    this.EXT_ = 0x19;
    this.CONTROL_ = (0x1c | this.field_) << 8;
    this.OFFSET_ = 0x1f;
  }

  // LSByte command codes recognized by Cea608Stream (not exhaustive); see
  // http://www.gpo.gov/fdsys/pkg/CFR-2010-title47-vol1/pdf/CFR-2010-title47-vol1-sec15-119.pdf
  var controlCodes = {
    // Pop-on Mode
    RESUME_CAPTION_LOADING_: 0x20,
    END_OF_CAPTION_: 0x2f,
    // Roll-up Mode
    ROLL_UP_2_ROWS_: 0x25,
    ROLL_UP_3_ROWS_: 0x26,
    ROLL_UP_4_ROWS_: 0x27,
    CARRIAGE_RETURN_: 0x2d,
    // paint-on mode
    RESUME_DIRECT_CAPTIONING_: 0x29,
    // Erasure
    BACKSPACE_: 0x21,
    ERASE_DISPLAYED_MEMORY_: 0x2c,
    ERASE_NON_DISPLAYED_MEMORY_: 0x2e
  };
  var self = this;

  Object.keys(controlCodes).forEach(function (name) {
    self[name] = self.CONTROL_ | controlCodes[name];
  });

  // Padding is a full 16-bit value, not a CONTROL_-relative code
  this.PADDING_ = 0x0000;
};
4713
4714 /**
4715 * Detects if the 2-byte packet data is a special character
4716 *
4717 * Special characters have a second byte in the range 0x30 to 0x3f,
4718 * with the first byte being 0x11 (for data channel 1) or 0x19 (for
4719 * data channel 2).
4720 *
4721 * @param {Integer} char0 The first byte
4722 * @param {Integer} char1 The second byte
4723 * @return {Boolean} Whether the 2 bytes are an special character
4724 */
4725 Cea608Stream.prototype.isSpecialCharacter = function (char0, char1) {
4726 return char0 === this.EXT_ && char1 >= 0x30 && char1 <= 0x3f;
4727 };
4728
4729 /**
4730 * Detects if the 2-byte packet data is an extended character
4731 *
4732 * Extended characters have a second byte in the range 0x20 to 0x3f,
4733 * with the first byte being 0x12 or 0x13 (for data channel 1) or
4734 * 0x1a or 0x1b (for data channel 2).
4735 *
4736 * @param {Integer} char0 The first byte
4737 * @param {Integer} char1 The second byte
4738 * @return {Boolean} Whether the 2 bytes are an extended character
4739 */
4740 Cea608Stream.prototype.isExtCharacter = function (char0, char1) {
4741 return (char0 === this.EXT_ + 1 || char0 === this.EXT_ + 2) && char1 >= 0x20 && char1 <= 0x3f;
4742 };
4743
4744 /**
4745 * Detects if the 2-byte packet is a mid-row code
4746 *
4747 * Mid-row codes have a second byte in the range 0x20 to 0x2f, with
4748 * the first byte being 0x11 (for data channel 1) or 0x19 (for data
4749 * channel 2).
4750 *
4751 * @param {Integer} char0 The first byte
4752 * @param {Integer} char1 The second byte
4753 * @return {Boolean} Whether the 2 bytes are a mid-row code
4754 */
4755 Cea608Stream.prototype.isMidRowCode = function (char0, char1) {
4756 return char0 === this.EXT_ && char1 >= 0x20 && char1 <= 0x2f;
4757 };
4758
4759 /**
4760 * Detects if the 2-byte packet is an offset control code
4761 *
4762 * Offset control codes have a second byte in the range 0x21 to 0x23,
4763 * with the first byte being 0x17 (for data channel 1) or 0x1f (for
4764 * data channel 2).
4765 *
4766 * @param {Integer} char0 The first byte
4767 * @param {Integer} char1 The second byte
4768 * @return {Boolean} Whether the 2 bytes are an offset control code
4769 */
4770 Cea608Stream.prototype.isOffsetControlCode = function (char0, char1) {
4771 return char0 === this.OFFSET_ && char1 >= 0x21 && char1 <= 0x23;
4772 };
4773
4774 /**
4775 * Detects if the 2-byte packet is a Preamble Address Code
4776 *
4777 * PACs have a first byte in the range 0x10 to 0x17 (for data channel 1)
4778 * or 0x18 to 0x1f (for data channel 2), with the second byte in the
4779 * range 0x40 to 0x7f.
4780 *
4781 * @param {Integer} char0 The first byte
4782 * @param {Integer} char1 The second byte
4783 * @return {Boolean} Whether the 2 bytes are a PAC
4784 */
4785 Cea608Stream.prototype.isPAC = function (char0, char1) {
4786 return char0 >= this.BASE_ && char0 < this.BASE_ + 8 && char1 >= 0x40 && char1 <= 0x7f;
4787 };
4788
/**
 * Detects if a packet's second byte is in the range of a PAC color code
 *
 * PAC color codes have the second byte be in the range 0x40 to 0x4f, or
 * 0x60 to 0x7f.
 *
 * NOTE(review): the original comment described the second range as
 * 0x60-0x6f, but the code below accepts up to 0x7f. The comment here
 * reflects the actual behavior; confirm against the CEA-608 spec before
 * tightening the check.
 *
 * @param {Integer} char1 The second byte
 * @return {Boolean} Whether the byte is a color PAC
 */
Cea608Stream.prototype.isColorPAC = function (char1) {
  return char1 >= 0x40 && char1 <= 0x4f || char1 >= 0x60 && char1 <= 0x7f;
};
4801
/**
 * Detects if a single byte is in the range of a normal character
 *
 * Normal text bytes are in the range 0x20 to 0x7f.
 *
 * @param {Integer} charCode The byte
 * @return {Boolean} Whether the byte is a normal character
 */
Cea608Stream.prototype.isNormalChar = function (charCode) {
  return charCode >= 0x20 && charCode <= 0x7f;
};
4813
/**
 * Configures roll-up captioning mode
 *
 * @param {Integer} pts Current PTS
 * @param {Integer} newBaseRow Used by PACs to slide the current window to
 *                             a new position
 */
Cea608Stream.prototype.setRollUp = function (pts, newBaseRow) {
  if (this.mode_ !== 'rollUp') {
    // Entering roll-up: anchor the window to the bottom row and, per the
    // spec, wipe both display memories (flushing what is on screen first)
    this.row_ = BOTTOM_ROW;
    this.mode_ = 'rollUp';
    this.flushDisplayed(pts);
    this.nonDisplayed_ = createDisplayBuffer();
    this.displayed_ = createDisplayBuffer();
  }

  if (newBaseRow !== undefined && newBaseRow !== this.row_) {
    // Relocate the currently displayed captions (up or down) so they sit
    // on the requested base row
    for (var rowOffset = 0; rowOffset < this.rollUpRows_; rowOffset++) {
      this.displayed_[newBaseRow - rowOffset] = this.displayed_[this.row_ - rowOffset];
      this.displayed_[this.row_ - rowOffset] = '';
    }
  }

  if (newBaseRow === undefined) {
    newBaseRow = this.row_;
  }

  this.topRow_ = newBaseRow - this.rollUpRows_ + 1;
};
4846
// Adds the opening HTML tag for the passed character to the caption text,
// and keeps track of it for later closing
Cea608Stream.prototype.addFormatting = function (pts, format) {
  // remember what we opened so clearFormatting can close it later
  this.formatting_ = this.formatting_.concat(format);

  var openTags = '';
  for (var i = 0; i < format.length; i++) {
    openTags += '<' + format[i] + '>';
  }
  // append through the current mode's text handler (popOn/rollUp/paintOn)
  this[this.mode_](pts, openTags);
};
4856
// Adds HTML closing tags for current formatting to caption text and
// clears remembered formatting
Cea608Stream.prototype.clearFormatting = function (pts) {
  if (!this.formatting_.length) {
    return;
  }

  // close tags in the reverse order they were opened
  var closeTags = '';
  for (var i = this.formatting_.length - 1; i >= 0; i--) {
    closeTags += '</' + this.formatting_[i] + '>';
  }
  this.formatting_ = [];
  // append through the current mode's text handler (popOn/rollUp/paintOn)
  this[this.mode_](pts, closeTags);
};
4869
// Mode Implementations

// Pop-on mode: buffer text off-screen in non-displayed memory; it becomes
// visible only when the memories are flipped elsewhere
Cea608Stream.prototype.popOn = function (pts, text) {
  this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_] + text;
};
4878
// Roll-up mode: text is appended directly to the visible base row
Cea608Stream.prototype.rollUp = function (pts, text) {
  this.displayed_[this.row_] = this.displayed_[this.row_] + text;
};
4885
// Scrolls the roll-up window's contents up by one row and clears
// everything outside the window
Cea608Stream.prototype.shiftRowsUp_ = function () {
  var row;

  // wipe everything above the roll-up window
  for (row = 0; row < this.topRow_; row++) {
    this.displayed_[row] = '';
  }
  // wipe everything below the window
  for (row = this.row_ + 1; row < BOTTOM_ROW + 1; row++) {
    this.displayed_[row] = '';
  }
  // slide the window's contents up one row
  for (row = this.topRow_; row < this.row_; row++) {
    this.displayed_[row] = this.displayed_[row + 1];
  }
  // the base row is now free for new text
  this.displayed_[this.row_] = '';
};
4902
// Paint-on mode: like roll-up, text is appended directly to the visible row
Cea608Stream.prototype.paintOn = function (pts, text) {
  this.displayed_[this.row_] = this.displayed_[this.row_] + text;
};
4909
// exports: the public surface of the caption-parsing module (CEA-708
// CaptionStream wrapper and the CEA-608 channel decoder)
var captionStream = {
  CaptionStream: CaptionStream,
  Cea608Stream: Cea608Stream
};
4915
4916 /**
4917 * mux.js
4918 *
4919 * Copyright (c) Brightcove
4920 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
4921 */
4922
// Elementary stream_type values (from the MP2T PMT) that this pipeline
// understands
var streamTypes = {
  H264_STREAM_TYPE: 0x1B,
  ADTS_STREAM_TYPE: 0x0F,
  METADATA_STREAM_TYPE: 0x15
};

// PTS/DTS values are 33 bits wide, so timestamps wrap at 2^33
var MAX_TS = 8589934592;

// A jump larger than 2^32 (~13 hours at 90kHz) is treated as a rollover
var RO_THRESH = 4294967296;

var TYPE_SHARED = 'shared';

/**
 * Shifts a 33-bit timestamp by whole rollover periods until it lands
 * within one rollover window of a reference timestamp.
 *
 * @param {number} value the timestamp to adjust
 * @param {number} reference the timestamp to adjust toward
 * @return {number} the adjusted timestamp
 */
var handleRollover = function handleRollover(value, reference) {
  // If the current timestamp value is greater than our reference timestamp
  // and we detect a rollover, the roll happened in the opposite direction.
  // Example: enter a long stream just after a rollover occurred; the
  // reference is small (e.g. 1) and a backwards seek over the rollover
  // point yields very large values (e.g. 2^33 - 1) that should become
  // `value - 2^33`.
  var direction = value > reference ? -1 : 1;

  // Note: a seek forwards or back that is greater than RO_THRESH
  // (2^32, ~13 hours) will cause an incorrect adjustment.
  while (Math.abs(reference - value) > RO_THRESH) {
    value += direction * MAX_TS;
  }

  return value;
};
4957
/**
 * A stream that rewrites the pts/dts of passing packets so they stay on
 * the same rollover "lap" as the first timestamp seen (or the last one
 * seen before the previous flush).
 *
 * @param {string} [type] only packets whose `type` matches are adjusted;
 *        omit for a "shared" stream that accepts all packets
 */
var TimestampRolloverStream = function TimestampRolloverStream(type) {
  var lastSeenDts;
  var rolloverReference;

  TimestampRolloverStream.prototype.init.call(this);

  // The "shared" type is used when a stream carries muxed video and
  // audio; a string is used instead of `undefined` purely because it
  // makes debugging clearer.
  this.type_ = type || TYPE_SHARED;

  this.push = function (data) {
    // non-shared streams only operate on data matching their own type
    if (this.type_ !== TYPE_SHARED && data.type !== this.type_) {
      return;
    }

    // the first timestamp seen becomes the rollover reference
    if (rolloverReference === undefined) {
      rolloverReference = data.dts;
    }

    data.dts = handleRollover(data.dts, rolloverReference);
    data.pts = handleRollover(data.pts, rolloverReference);

    lastSeenDts = data.dts;

    this.trigger('data', data);
  };

  this.flush = function () {
    // carry the most recent DTS forward as the reference for the next
    // batch of packets
    rolloverReference = lastSeenDts;
    this.trigger('done');
  };

  this.endTimeline = function () {
    this.flush();
    this.trigger('endedtimeline');
  };

  this.discontinuity = function () {
    rolloverReference = void 0;
    lastSeenDts = void 0;
  };

  this.reset = function () {
    this.discontinuity();
    this.trigger('reset');
  };
};
5008
// inherit on/off/trigger/pipe behavior from the base Stream
TimestampRolloverStream.prototype = new stream();

// module exports: the stream plus the bare rollover helper
var timestampRolloverStream = {
  TimestampRolloverStream: TimestampRolloverStream,
  handleRollover: handleRollover
};
5015
// Percent-encode each byte in [start, end) so the result can be handed to
// decodeURIComponent/unescape for text decoding.
var percentEncode = function percentEncode(bytes, start, end) {
  var encoded = '';
  for (var i = start; i < end; i++) {
    encoded += '%' + ('00' + bytes[i].toString(16)).slice(-2);
  }
  return encoded;
};

// return the string representation of the specified byte range,
// interpreted as UTF-8.
var parseUtf8 = function parseUtf8(bytes, start, end) {
  return decodeURIComponent(percentEncode(bytes, start, end));
};

// return the string representation of the specified byte range,
// interpreted as ISO-8859-1.
var parseIso88591 = function parseIso88591(bytes, start, end) {
  return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
};

// ID3 "syncsafe" integers pack 28 bits across four bytes, 7 bits per byte
// (the most significant bit of each byte is always zero).
var parseSyncSafeInteger = function parseSyncSafeInteger(data) {
  return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
};

// ID3 frame parsers, keyed by the 4-character frame id.
var tagParsers = {
  // user-defined text frame: encoding byte, null-terminated description,
  // then the value
  TXXX: function TXXX(tag) {
    if (tag.data[0] !== 3) {
      // ignore frames with unrecognized character encodings
      return;
    }

    for (var i = 1; i < tag.data.length; i++) {
      if (tag.data[i] === 0) {
        // parse the text fields on either side of the null delimiter
        tag.description = parseUtf8(tag.data, 1, i);
        // do not include the null terminator in the tag value
        tag.value = parseUtf8(tag.data, i + 1, tag.data.length).replace(/\0*$/, '');
        break;
      }
    }
    tag.data = tag.value;
  },
  // user-defined URL frame: encoding byte, null-terminated description,
  // then the URL
  WXXX: function WXXX(tag) {
    if (tag.data[0] !== 3) {
      // ignore frames with unrecognized character encodings
      return;
    }

    for (var i = 1; i < tag.data.length; i++) {
      if (tag.data[i] === 0) {
        // parse the description and URL fields
        tag.description = parseUtf8(tag.data, 1, i);
        tag.url = parseUtf8(tag.data, i + 1, tag.data.length);
        break;
      }
    }
  },
  // private frame: null-terminated ISO-8859-1 owner id, then opaque data
  PRIV: function PRIV(tag) {
    var i;

    for (i = 0; i < tag.data.length; i++) {
      if (tag.data[i] === 0) {
        tag.owner = parseIso88591(tag.data, 0, i);
        break;
      }
    }
    tag.privateData = tag.data.subarray(i + 1);
    tag.data = tag.privateData;
  }
};

var _MetadataStream;
5091
/**
 * A stream that reassembles ID3 tags from MP2T 'timed-metadata' PES
 * chunks and emits one 'data' event per complete tag, with each frame
 * parsed (where a parser exists in tagParsers) and pts/dts attached.
 *
 * @param {Object} [options]
 * @param {boolean} [options.debug] log skipped/unrecognized packets
 * @param {Uint8Array} [options.descriptor] the bytes of the program-level
 *        descriptor field in MP2T; see ISO/IEC 13818-1:2013 (E), section
 *        2.6 "Program and program element descriptors"
 */
_MetadataStream = function MetadataStream(options) {
  var settings = {
    debug: !!(options && options.debug),

    // the bytes of the program-level descriptor field in MP2T
    // see ISO/IEC 13818-1:2013 (E), section 2.6 "Program and
    // program element descriptors"
    descriptor: options && options.descriptor
  },


  // the total size in bytes of the ID3 tag being parsed
  tagSize = 0,


  // tag data that is not complete enough to be parsed
  buffer = [],


  // the total number of bytes currently in the buffer
  bufferSize = 0,
      i;

  _MetadataStream.prototype.init.call(this);

  // calculate the text track in-band metadata track dispatch type
  // https://html.spec.whatwg.org/multipage/embedded-content.html#steps-to-expose-a-media-resource-specific-text-track
  this.dispatchType = streamTypes.METADATA_STREAM_TYPE.toString(16);
  if (settings.descriptor) {
    // append each descriptor byte as two lowercase hex digits
    for (i = 0; i < settings.descriptor.length; i++) {
      this.dispatchType += ('00' + settings.descriptor[i].toString(16)).slice(-2);
    }
  }

  /**
   * Buffer a timed-metadata chunk; once a complete ID3 tag has
   * accumulated, parse its frames and emit a 'data' event.
   */
  this.push = function (chunk) {
    var tag, frameStart, frameSize, frame, i, frameHeader;
    if (chunk.type !== 'timed-metadata') {
      return;
    }

    // if data_alignment_indicator is set in the PES header,
    // we must have the start of a new ID3 tag. Assume anything
    // remaining in the buffer was malformed and throw it out
    if (chunk.dataAlignmentIndicator) {
      bufferSize = 0;
      buffer.length = 0;
    }

    // ignore events that don't look like ID3 data (a fresh buffer must
    // start with the "ID3" magic and a full 10-byte header)
    if (buffer.length === 0 && (chunk.data.length < 10 || chunk.data[0] !== 'I'.charCodeAt(0) || chunk.data[1] !== 'D'.charCodeAt(0) || chunk.data[2] !== '3'.charCodeAt(0))) {
      if (settings.debug) {
        // eslint-disable-next-line no-console
        console.log('Skipping unrecognized metadata packet');
      }
      return;
    }

    // add this chunk to the data we've collected so far

    buffer.push(chunk);
    bufferSize += chunk.data.byteLength;

    // grab the size of the entire frame from the ID3 header
    if (buffer.length === 1) {
      // the frame size is transmitted as a 28-bit integer in the
      // last four bytes of the ID3 header.
      // The most significant bit of each byte is dropped and the
      // results concatenated to recover the actual value.
      tagSize = parseSyncSafeInteger(chunk.data.subarray(6, 10));

      // ID3 reports the tag size excluding the header but it's more
      // convenient for our comparisons to include it
      tagSize += 10;
    }

    // if the entire frame has not arrived, wait for more data
    if (bufferSize < tagSize) {
      return;
    }

    // collect the entire frame so it can be parsed; the pts/dts of the
    // first buffered chunk applies to the whole tag
    tag = {
      data: new Uint8Array(tagSize),
      frames: [],
      pts: buffer[0].pts,
      dts: buffer[0].dts
    };
    // drain buffered chunks into tag.data until tagSize bytes are copied
    for (i = 0; i < tagSize;) {
      tag.data.set(buffer[0].data.subarray(0, tagSize - i), i);
      i += buffer[0].data.byteLength;
      bufferSize -= buffer[0].data.byteLength;
      buffer.shift();
    }

    // find the start of the first frame and the end of the tag
    frameStart = 10;
    if (tag.data[5] & 0x40) {
      // advance the frame start past the extended header
      frameStart += 4; // header size field
      frameStart += parseSyncSafeInteger(tag.data.subarray(10, 14));

      // clip any padding off the end
      tagSize -= parseSyncSafeInteger(tag.data.subarray(16, 20));
    }

    // parse one or more ID3 frames
    // http://id3.org/id3v2.3.0#ID3v2_frame_overview
    do {
      // determine the number of bytes in this frame
      frameSize = parseSyncSafeInteger(tag.data.subarray(frameStart + 4, frameStart + 8));
      if (frameSize < 1) {
        // a zero/negative size means the tag is malformed; give up on
        // the remainder rather than looping forever
        // eslint-disable-next-line no-console
        return console.log('Malformed ID3 frame encountered. Skipping metadata parsing.');
      }
      // the frame id is the first four bytes of the frame header
      frameHeader = String.fromCharCode(tag.data[frameStart], tag.data[frameStart + 1], tag.data[frameStart + 2], tag.data[frameStart + 3]);

      frame = {
        id: frameHeader,
        data: tag.data.subarray(frameStart + 10, frameStart + frameSize + 10)
      };
      frame.key = frame.id;
      if (tagParsers[frame.id]) {
        tagParsers[frame.id](frame);

        // handle the special PRIV frame used to indicate the start
        // time for raw AAC data
        if (frame.owner === 'com.apple.streaming.transportStreamTimestamp') {
          // reconstruct the 33-bit MPEG-2 timestamp from the frame payload
          var d = frame.data,
              size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;

          size *= 4;
          size += d[7] & 0x03;
          frame.timeStamp = size;
          // in raw AAC, all subsequent data will be timestamped based
          // on the value of this frame
          // we couldn't have known the appropriate pts and dts before
          // parsing this ID3 tag so set those values now
          if (tag.pts === undefined && tag.dts === undefined) {
            tag.pts = frame.timeStamp;
            tag.dts = frame.timeStamp;
          }
          this.trigger('timestamp', frame);
        }
      }
      tag.frames.push(frame);

      frameStart += 10; // advance past the frame header
      frameStart += frameSize; // advance past the frame body
    } while (frameStart < tagSize);
    this.trigger('data', tag);
  };
};
// inherit on/off/trigger/pipe behavior from the base Stream
_MetadataStream.prototype = new stream();

var metadataStream = _MetadataStream;

var TimestampRolloverStream$1 = timestampRolloverStream.TimestampRolloverStream;

// object types
var _TransportPacketStream, _TransportParseStream, _ElementaryStream;

// constants
// MP2T packets are always exactly 188 bytes long
var MP2T_PACKET_LENGTH = 188,

// every MP2T packet begins with this sync byte
SYNC_BYTE = 0x47;
5258
/**
 * Splits an incoming stream of binary data into MPEG-2 Transport
 * Stream packets.
 */
_TransportPacketStream = function TransportPacketStream() {
  // holds any trailing partial packet between push() calls
  var leftover = new Uint8Array(MP2T_PACKET_LENGTH);
  var leftoverBytes = 0;

  _TransportPacketStream.prototype.init.call(this);

  // Deliver new bytes to the stream.

  /**
   * Split a stream of data into M2TS packets
   **/
  this.push = function (bytes) {
    var startIndex = 0;
    var endIndex = MP2T_PACKET_LENGTH;
    var everything;

    // If there are bytes remaining from the last segment, prepend them to
    // the bytes that were pushed in
    if (leftoverBytes) {
      everything = new Uint8Array(bytes.byteLength + leftoverBytes);
      everything.set(leftover.subarray(0, leftoverBytes));
      everything.set(bytes, leftoverBytes);
      leftoverBytes = 0;
    } else {
      everything = bytes;
    }

    // Scan while at least one full packet's worth of data remains
    while (endIndex < everything.byteLength) {
      if (everything[startIndex] === SYNC_BYTE && everything[endIndex] === SYNC_BYTE) {
        // Sync bytes bracket a whole packet: emit it and jump a full
        // packet length forward
        this.trigger('data', everything.subarray(startIndex, endIndex));
        startIndex += MP2T_PACKET_LENGTH;
        endIndex += MP2T_PACKET_LENGTH;
      } else {
        // De-synchronized: slide forward one byte at a time until a pair
        // of sync bytes denotes a packet again
        startIndex++;
        endIndex++;
      }
    }

    // Keep any data left over at the end of the segment that couldn't be a
    // whole packet; it may be the start of a packet that continues in the
    // next segment
    if (startIndex < everything.byteLength) {
      leftover.set(everything.subarray(startIndex), 0);
      leftoverBytes = everything.byteLength - startIndex;
    }
  };

  /**
   * Passes identified M2TS packets to the TransportParseStream to be parsed
   **/
  this.flush = function () {
    // If the buffer contains a whole, sync-aligned packet when we are being
    // flushed, emit it and empty the buffer. Otherwise hold onto the data
    // because it may be important for decoding the next segment
    if (leftoverBytes === MP2T_PACKET_LENGTH && leftover[0] === SYNC_BYTE) {
      this.trigger('data', leftover);
      leftoverBytes = 0;
    }
    this.trigger('done');
  };

  this.endTimeline = function () {
    this.flush();
    this.trigger('endedtimeline');
  };

  this.reset = function () {
    leftoverBytes = 0;
    this.trigger('reset');
  };
};
_TransportPacketStream.prototype = new stream();
5342
/**
 * Accepts an MP2T TransportPacketStream and emits data events with parsed
 * forms of the individual transport stream packets.
 */
_TransportParseStream = function TransportParseStream() {
  var parsePsi, parsePat, parsePmt, self;
  _TransportParseStream.prototype.init.call(this);
  self = this;

  // PES packets seen before the PMT arrives are queued here as
  // [packet, offset, result] triples and replayed once the PMT is parsed
  this.packetsWaitingForPmt = [];
  this.programMapTable = undefined;

  // Dispatch a Program Specific Information payload to the PAT or PMT parser
  parsePsi = function parsePsi(payload, psi) {
    var offset = 0;

    // PSI packets may be split into multiple sections and those
    // sections may be split into multiple packets. If a PSI
    // section starts in this packet, the payload_unit_start_indicator
    // will be true and the first byte of the payload will indicate
    // the offset from the current position to the start of the
    // section.
    if (psi.payloadUnitStartIndicator) {
      offset += payload[offset] + 1;
    }

    if (psi.type === 'pat') {
      parsePat(payload.subarray(offset), psi);
    } else {
      parsePmt(payload.subarray(offset), psi);
    }
  };

  // Extract the PMT PID from a Program Association Table payload.
  // NOTE(review): only the first PMT entry is parsed — multi-program
  // transport streams are not supported here.
  parsePat = function parsePat(payload, pat) {
    pat.section_number = payload[7]; // eslint-disable-line camelcase
    pat.last_section_number = payload[8]; // eslint-disable-line camelcase

    // skip the PSI header and parse the first PMT entry
    self.pmtPid = (payload[10] & 0x1F) << 8 | payload[11];
    pat.pmtPid = self.pmtPid;
  };

  /**
   * Parse out the relevant fields of a Program Map Table (PMT).
   * @param payload {Uint8Array} the PMT-specific portion of an MP2T
   * packet. The first byte in this array should be the table_id
   * field.
   * @param pmt {object} the object that should be decorated with
   * fields parsed from the PMT.
   */
  parsePmt = function parsePmt(payload, pmt) {
    var sectionLength, tableEnd, programInfoLength, offset;

    // PMTs can be sent ahead of the time when they should actually
    // take effect. We don't believe this should ever be the case
    // for HLS but we'll ignore "forward" PMT declarations if we see
    // them. Future PMT declarations have the current_next_indicator
    // set to zero.
    if (!(payload[5] & 0x01)) {
      return;
    }

    // overwrite any existing program map table
    self.programMapTable = {
      video: null,
      audio: null,
      'timed-metadata': {}
    };

    // the mapping table ends at the end of the current section
    // (minus the 4-byte CRC at the end)
    sectionLength = (payload[1] & 0x0f) << 8 | payload[2];
    tableEnd = 3 + sectionLength - 4;

    // to determine where the table is, we have to figure out how
    // long the program info descriptors are
    programInfoLength = (payload[10] & 0x0f) << 8 | payload[11];

    // advance the offset to the first entry in the mapping table
    offset = 12 + programInfoLength;
    while (offset < tableEnd) {
      var streamType = payload[offset];
      var pid = (payload[offset + 1] & 0x1F) << 8 | payload[offset + 2];

      // only map a single elementary_pid for audio and video stream types
      // TODO: should this be done for metadata too? for now maintain behavior of
      // multiple metadata streams
      if (streamType === streamTypes.H264_STREAM_TYPE && self.programMapTable.video === null) {
        self.programMapTable.video = pid;
      } else if (streamType === streamTypes.ADTS_STREAM_TYPE && self.programMapTable.audio === null) {
        self.programMapTable.audio = pid;
      } else if (streamType === streamTypes.METADATA_STREAM_TYPE) {
        // map pid to stream type for metadata streams
        self.programMapTable['timed-metadata'][pid] = streamType;
      }

      // move to the next table entry
      // skip past the elementary stream descriptors, if present
      offset += ((payload[offset + 3] & 0x0F) << 8 | payload[offset + 4]) + 5;
    }

    // record the map on the packet as well
    pmt.programMapTable = self.programMapTable;
  };

  /**
   * Deliver a new MP2T packet to the next stream in the pipeline.
   */
  this.push = function (packet) {
    var result = {},
        offset = 4;

    result.payloadUnitStartIndicator = !!(packet[1] & 0x40);

    // pid is a 13-bit field starting at the last bit of packet[1]
    result.pid = packet[1] & 0x1f;
    result.pid <<= 8;
    result.pid |= packet[2];

    // if an adaption field is present, its length is specified by the
    // fifth byte of the TS packet header. The adaptation field is
    // used to add stuffing to PES packets that don't fill a complete
    // TS packet, and to specify some forms of timing and control data
    // that we do not currently use.
    if ((packet[3] & 0x30) >>> 4 > 0x01) {
      offset += packet[offset] + 1;
    }

    // parse the rest of the packet based on the type
    if (result.pid === 0) {
      result.type = 'pat';
      parsePsi(packet.subarray(offset), result);
      this.trigger('data', result);
    } else if (result.pid === this.pmtPid) {
      result.type = 'pmt';
      parsePsi(packet.subarray(offset), result);
      this.trigger('data', result);

      // if there are any packets waiting for a PMT to be found, process them now
      while (this.packetsWaitingForPmt.length) {
        this.processPes_.apply(this, this.packetsWaitingForPmt.shift());
      }
    } else if (this.programMapTable === undefined) {
      // When we have not seen a PMT yet, defer further processing of
      // PES packets until one has been parsed
      this.packetsWaitingForPmt.push([packet, offset, result]);
    } else {
      this.processPes_(packet, offset, result);
    }
  };

  // Tag a PES packet with its stream type (looked up by PID in the PMT)
  // and forward it down the pipeline.
  this.processPes_ = function (packet, offset, result) {
    // set the appropriate stream type
    if (result.pid === this.programMapTable.video) {
      result.streamType = streamTypes.H264_STREAM_TYPE;
    } else if (result.pid === this.programMapTable.audio) {
      result.streamType = streamTypes.ADTS_STREAM_TYPE;
    } else {
      // if not video or audio, it is timed-metadata or unknown
      // if unknown, streamType will be undefined
      result.streamType = this.programMapTable['timed-metadata'][result.pid];
    }

    result.type = 'pes';
    result.data = packet.subarray(offset);
    this.trigger('data', result);
  };
};
_TransportParseStream.prototype = new stream();
_TransportParseStream.STREAM_TYPES = {
  h264: 0x1b,
  adts: 0x0f
};
5514
/**
 * Reconstitutes program elementary stream (PES) packets from parsed
 * transport stream packets. That is, if you pipe an
 * mp2t.TransportParseStream into a mp2t.ElementaryStream, the output
 * events will be events which capture the bytes for individual PES
 * packets plus relevant metadata that has been extracted from the
 * container.
 */
_ElementaryStream = function ElementaryStream() {
  var self = this,


  // PES packet fragments, accumulated per stream until a complete
  // packet can be flushed
  video = {
    data: [],
    size: 0
  },
      audio = {
    data: [],
    size: 0
  },
      timedMetadata = {
    data: [],
    size: 0
  },
      programMapTable,

  // Extract pts/dts/alignment info from a reassembled PES packet's header
  // and expose its payload as `pes.data`
  parsePes = function parsePes(payload, pes) {
    var ptsDtsFlags;

    // get the packet length, this will be 0 for video
    pes.packetLength = 6 + (payload[4] << 8 | payload[5]);

    // find out if this packets starts a new keyframe
    pes.dataAlignmentIndicator = (payload[6] & 0x04) !== 0;
    // PES packets may be annotated with a PTS value, or a PTS value
    // and a DTS value. Determine what combination of values is
    // available to work with.
    ptsDtsFlags = payload[7];

    // PTS and DTS are normally stored as a 33-bit number. Javascript
    // performs all bitwise operations on 32-bit integers but javascript
    // supports a much greater range (52-bits) of integer using standard
    // mathematical operations.
    // We construct a 31-bit value using bitwise operators over the 31
    // most significant bits and then multiply by 4 (equal to a left-shift
    // of 2) before we add the final 2 least significant bits of the
    // timestamp (equal to an OR.)
    if (ptsDtsFlags & 0xC0) {
      // the PTS and DTS are not written out directly. For information
      // on how they are encoded, see
      // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
      pes.pts = (payload[9] & 0x0E) << 27 | (payload[10] & 0xFF) << 20 | (payload[11] & 0xFE) << 12 | (payload[12] & 0xFF) << 5 | (payload[13] & 0xFE) >>> 3;
      pes.pts *= 4; // Left shift by 2
      pes.pts += (payload[13] & 0x06) >>> 1; // OR by the two LSBs
      pes.dts = pes.pts;
      if (ptsDtsFlags & 0x40) {
        pes.dts = (payload[14] & 0x0E) << 27 | (payload[15] & 0xFF) << 20 | (payload[16] & 0xFE) << 12 | (payload[17] & 0xFF) << 5 | (payload[18] & 0xFE) >>> 3;
        pes.dts *= 4; // Left shift by 2
        pes.dts += (payload[18] & 0x06) >>> 1; // OR by the two LSBs
      }
    }
    // the data section starts immediately after the PES header.
    // pes_header_data_length specifies the number of header bytes
    // that follow the last byte of the field.
    pes.data = payload.subarray(9 + payload[8]);
  },


  /**
   * Pass completely parsed PES packets to the next stream in the pipeline
   **/
  flushStream = function flushStream(stream$$1, type, forceFlush) {
    var packetData = new Uint8Array(stream$$1.size),
        event = {
      type: type
    },
        i = 0,
        offset = 0,
        packetFlushable = false,
        fragment;

    // do nothing if there is not enough buffered data for a complete
    // PES header
    if (!stream$$1.data.length || stream$$1.size < 9) {
      return;
    }
    event.trackId = stream$$1.data[0].pid;

    // reassemble the packet by concatenating all buffered fragments
    for (i = 0; i < stream$$1.data.length; i++) {
      fragment = stream$$1.data[i];

      packetData.set(fragment.data, offset);
      offset += fragment.data.byteLength;
    }

    // parse assembled packet's PES header
    parsePes(packetData, event);

    // non-video PES packets MUST have a non-zero PES_packet_length
    // check that there is enough stream data to fill the packet
    packetFlushable = type === 'video' || event.packetLength <= stream$$1.size;

    // flush pending packets if the conditions are right
    if (forceFlush || packetFlushable) {
      stream$$1.size = 0;
      stream$$1.data.length = 0;
    }

    // only emit packets that are complete. this is to avoid assembling
    // incomplete PES packets due to poor segmentation
    if (packetFlushable) {
      self.trigger('data', event);
    }
  };

  _ElementaryStream.prototype.init.call(this);

  /**
   * Identifies M2TS packet types and parses PES packets using metadata
   * parsed from the PMT
   **/
  this.push = function (data) {
    // dispatch table keyed by the incoming packet's type
    ({
      pat: function pat() {
        // we have to wait for the PMT to arrive as well before we
        // have any meaningful metadata
      },
      pes: function pes() {
        var stream$$1, streamType;

        switch (data.streamType) {
          case streamTypes.H264_STREAM_TYPE:
            stream$$1 = video;
            streamType = 'video';
            break;
          case streamTypes.ADTS_STREAM_TYPE:
            stream$$1 = audio;
            streamType = 'audio';
            break;
          case streamTypes.METADATA_STREAM_TYPE:
            stream$$1 = timedMetadata;
            streamType = 'timed-metadata';
            break;
          default:
            // ignore unknown stream types
            return;
        }

        // if a new packet is starting, we can flush the completed
        // packet
        if (data.payloadUnitStartIndicator) {
          flushStream(stream$$1, streamType, true);
        }

        // buffer this fragment until we are sure we've received the
        // complete payload
        stream$$1.data.push(data);
        stream$$1.size += data.data.byteLength;
      },
      pmt: function pmt() {
        var event = {
          type: 'metadata',
          tracks: []
        };

        programMapTable = data.programMapTable;

        // translate audio and video streams to tracks
        if (programMapTable.video !== null) {
          event.tracks.push({
            timelineStartInfo: {
              baseMediaDecodeTime: 0
            },
            id: +programMapTable.video,
            codec: 'avc',
            type: 'video'
          });
        }
        if (programMapTable.audio !== null) {
          event.tracks.push({
            timelineStartInfo: {
              baseMediaDecodeTime: 0
            },
            id: +programMapTable.audio,
            codec: 'adts',
            type: 'audio'
          });
        }

        self.trigger('data', event);
      }
    })[data.type]();
  };

  // NOTE(review): timedMetadata buffers are not cleared here, only
  // video and audio — confirm whether that is intentional upstream.
  this.reset = function () {
    video.size = 0;
    video.data.length = 0;
    audio.size = 0;
    audio.data.length = 0;
    this.trigger('reset');
  };

  /**
   * Flush any remaining input. Video PES packets may be of variable
   * length. Normally, the start of a new video packet can trigger the
   * finalization of the previous packet. That is not possible if no
   * more video is forthcoming, however. In that case, some other
   * mechanism (like the end of the file) has to be employed. When it is
   * clear that no additional data is forthcoming, calling this method
   * will flush the buffered packets.
   */
  this.flushStreams_ = function () {
    // !!THIS ORDER IS IMPORTANT!!
    // video first then audio
    flushStream(video, 'video');
    flushStream(audio, 'audio');
    flushStream(timedMetadata, 'timed-metadata');
  };

  this.flush = function () {
    this.flushStreams_();
    this.trigger('done');
  };
};
_ElementaryStream.prototype = new stream();
5741
// Public namespace for the MP2T pipeline pieces
var m2ts = {
  PAT_PID: 0x0000,
  MP2T_PACKET_LENGTH: MP2T_PACKET_LENGTH,
  TransportPacketStream: _TransportPacketStream,
  TransportParseStream: _TransportParseStream,
  ElementaryStream: _ElementaryStream,
  TimestampRolloverStream: TimestampRolloverStream$1,
  CaptionStream: captionStream.CaptionStream,
  Cea608Stream: captionStream.Cea608Stream,
  MetadataStream: metadataStream
};

// Re-export the stream type constants (H264_STREAM_TYPE, etc.) on the
// m2ts namespace. Object.keys visits only own enumerable properties,
// matching the for..in + hasOwnProperty guard it replaces.
Object.keys(streamTypes).forEach(function (type) {
  m2ts[type] = streamTypes[type];
});

var m2ts_1 = m2ts;
5761
var ONE_SECOND_IN_TS$2 = clock.ONE_SECOND_IN_TS;

var _AdtsStream;

// Sampling frequencies in Hz, indexed by the 4-bit
// sampling_frequency_index field of the ADTS header
var ADTS_SAMPLING_FREQUENCIES = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];

/*
 * Accepts a ElementaryStream and emits data events with parsed
 * AAC Audio Frames of the individual packets. Input audio in ADTS
 * format is unpacked and re-emitted as AAC frames.
 *
 * @see http://wiki.multimedia.cx/index.php?title=ADTS
 * @see http://wiki.multimedia.cx/?title=Understanding_AAC
 */
_AdtsStream = function AdtsStream(handlePartialSegments) {
  // `buffer` carries the bytes of an incomplete ADTS frame across push()
  // calls; `frameNum` counts frames emitted since the last flush so per-frame
  // timestamps can be extrapolated from the PES packet's pts/dts.
  var buffer,
      frameNum = 0;

  _AdtsStream.prototype.init.call(this);

  /**
   * Unpack any complete ADTS frames contained in an audio PES packet and emit
   * one 'data' event per AAC frame. A partial trailing frame is buffered
   * until more data arrives.
   *
   * @param {Object} packet - a PES packet from an ElementaryStream
   */
  this.push = function (packet) {
    var i = 0,
        frameLength,
        protectionSkipBytes,
        frameEnd,
        oldBuffer,
        sampleCount,
        adtsFrameDuration;

    if (!handlePartialSegments) {
      frameNum = 0;
    }

    if (packet.type !== 'audio') {
      // ignore non-audio data
      return;
    }

    // Prepend any data in the buffer to the input data so that we can parse
    // aac frames the cross a PES packet boundary
    if (buffer) {
      oldBuffer = buffer;
      buffer = new Uint8Array(oldBuffer.byteLength + packet.data.byteLength);
      buffer.set(oldBuffer);
      buffer.set(packet.data, oldBuffer.byteLength);
    } else {
      buffer = packet.data;
    }

    // unpack any ADTS frames which have been fully received
    // for details on the ADTS header, see http://wiki.multimedia.cx/index.php?title=ADTS
    while (i + 5 < buffer.length) {

      // Look for the start of an ADTS header..
      if (buffer[i] !== 0xFF || (buffer[i + 1] & 0xF6) !== 0xF0) {
        // If a valid header was not found, jump one forward and attempt to
        // find a valid ADTS header starting at the next byte
        i++;
        continue;
      }

      // The protection skip bit tells us if we have 2 bytes of CRC data at the
      // end of the ADTS header
      protectionSkipBytes = (~buffer[i + 1] & 0x01) * 2;

      // Frame length is a 13 bit integer starting 16 bits from the
      // end of the sync sequence
      frameLength = (buffer[i + 3] & 0x03) << 11 | buffer[i + 4] << 3 | (buffer[i + 5] & 0xe0) >> 5;

      sampleCount = ((buffer[i + 6] & 0x03) + 1) * 1024;
      // duration of one frame in 90kHz ticks: samples / sample-rate seconds
      adtsFrameDuration = sampleCount * ONE_SECOND_IN_TS$2 / ADTS_SAMPLING_FREQUENCIES[(buffer[i + 2] & 0x3c) >>> 2];

      frameEnd = i + frameLength;

      // If we don't have enough data to actually finish this ADTS frame, return
      // and wait for more data
      if (buffer.byteLength < frameEnd) {
        return;
      }

      // Otherwise, deliver the complete AAC frame
      this.trigger('data', {
        pts: packet.pts + frameNum * adtsFrameDuration,
        dts: packet.dts + frameNum * adtsFrameDuration,
        sampleCount: sampleCount,
        audioobjecttype: (buffer[i + 2] >>> 6 & 0x03) + 1,
        channelcount: (buffer[i + 2] & 1) << 2 | (buffer[i + 3] & 0xc0) >>> 6,
        samplerate: ADTS_SAMPLING_FREQUENCIES[(buffer[i + 2] & 0x3c) >>> 2],
        samplingfrequencyindex: (buffer[i + 2] & 0x3c) >>> 2,
        // assume ISO/IEC 14496-12 AudioSampleEntry default of 16
        samplesize: 16,
        data: buffer.subarray(i + 7 + protectionSkipBytes, frameEnd)
      });

      frameNum++;

      // If the buffer is empty, clear it and return
      if (buffer.byteLength === frameEnd) {
        buffer = undefined;
        return;
      }

      // Remove the finished frame from the buffer and start the process again
      buffer = buffer.subarray(frameEnd);
    }
  };

  // Signal end of input; also resets the frame counter used for timestamp
  // extrapolation.
  this.flush = function () {
    frameNum = 0;
    this.trigger('done');
  };

  // Drop any partially-buffered frame (e.g. on seek or discontinuity).
  this.reset = function () {
    buffer = void 0;
    this.trigger('reset');
  };

  // Drop buffered data and signal the end of the current timeline.
  this.endTimeline = function () {
    buffer = void 0;
    this.trigger('endedtimeline');
  };
};

_AdtsStream.prototype = new stream();

var adts = _AdtsStream;
5888
5889 /**
5890 * mux.js
5891 *
5892 * Copyright (c) Brightcove
5893 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5894 */
5895
var ExpGolomb;

/**
 * Parser for exponential Golomb codes, a variable-bitwidth number encoding
 * scheme used by h264.
 */
ExpGolomb = function ExpGolomb(workingData) {
  // number of bytes of workingData not yet loaded into the current word
  var bytesLeft = workingData.byteLength;

  // 32-bit window over the bitstream, consumed MSB-first
  var word = 0; // :uint

  // number of unconsumed bits remaining in `word`
  var bitsInWord = 0; // :uint

  // ():uint -- bits represented by the bytes not yet loaded
  this.length = function () {
    return 8 * bytesLeft;
  };

  // ():uint -- total number of unread bits
  this.bitsAvailable = function () {
    return 8 * bytesLeft + bitsInWord;
  };

  // ():void -- refill `word` with up to four more bytes of input
  this.loadWord = function () {
    var position = workingData.byteLength - bytesLeft;
    var wordBytes = new Uint8Array(4);
    var availableBytes = Math.min(4, bytesLeft);

    if (availableBytes === 0) {
      throw new Error('no bytes available');
    }

    wordBytes.set(workingData.subarray(position, position + availableBytes));
    word = new DataView(wordBytes.buffer).getUint32(0);

    // track the amount of workingData that has been processed
    bitsInWord = availableBytes * 8;
    bytesLeft -= availableBytes;
  };

  // (count:int):void -- discard the next `count` bits
  this.skipBits = function (count) {
    var skipBytes; // :int
    if (bitsInWord > count) {
      word <<= count;
      bitsInWord -= count;
      return;
    }
    // consume the rest of the current word, whole bytes, then the remainder
    count -= bitsInWord;
    skipBytes = Math.floor(count / 8);

    count -= skipBytes * 8;
    bytesLeft -= skipBytes;

    this.loadWord();

    word <<= count;
    bitsInWord -= count;
  };

  // (size:int):uint -- read the next `size` bits (size > 31 unsupported)
  this.readBits = function (size) {
    var bits = Math.min(bitsInWord, size); // :uint
    var valu = word >>> 32 - bits; // :uint

    bitsInWord -= bits;
    if (bitsInWord > 0) {
      word <<= bits;
    } else if (bytesLeft > 0) {
      this.loadWord();
    }

    // recurse when the read straddled a word boundary
    bits = size - bits;
    return bits > 0 ? valu << bits | this.readBits(bits) : valu;
  };

  // ():uint -- consume and count leading zero bits
  this.skipLeadingZeros = function () {
    var zeros; // :uint
    for (zeros = 0; zeros < bitsInWord; ++zeros) {
      if ((word & 0x80000000 >>> zeros) !== 0) {
        // the first bit of working word is 1
        word <<= zeros;
        bitsInWord -= zeros;
        return zeros;
      }
    }

    // we exhausted the current word and still have not found a 1
    this.loadWord();
    return zeros + this.skipLeadingZeros();
  };

  // ():void
  this.skipUnsignedExpGolomb = function () {
    this.skipBits(1 + this.skipLeadingZeros());
  };

  // ():void
  this.skipExpGolomb = function () {
    this.skipBits(1 + this.skipLeadingZeros());
  };

  // ():uint
  this.readUnsignedExpGolomb = function () {
    var clz = this.skipLeadingZeros(); // :uint
    return this.readBits(clz + 1) - 1;
  };

  // ():int -- signed Exp-Golomb: odd codes map to positive, even to negative
  this.readExpGolomb = function () {
    var valu = this.readUnsignedExpGolomb(); // :int
    return 0x01 & valu ? 1 + valu >>> 1 : -1 * (valu >>> 1);
  };

  // Some convenience functions
  // :Boolean
  this.readBoolean = function () {
    return this.readBits(1) === 1;
  };

  // ():int
  this.readUnsignedByte = function () {
    return this.readBits(8);
  };

  // prime the first word
  this.loadWord();
};

var expGolomb = ExpGolomb;
6043
var _H264Stream, _NalByteStream;
var PROFILES_WITH_OPTIONAL_SPS_DATA;

/**
 * Accepts a NAL unit byte stream and unpacks the embedded NAL units.
 */
_NalByteStream = function NalByteStream() {
  // `syncPoint` indexes the first byte of the 0x000001 start code of the NAL
  // unit currently being accumulated; `buffer` holds unconsumed bytes across
  // push() calls.
  var syncPoint = 0,
      i,
      buffer;
  _NalByteStream.prototype.init.call(this);

  /*
   * Scans a byte stream and triggers a data event with the NAL units found.
   * @param {Object} data Event received from H264Stream
   * @param {Uint8Array} data.data The h264 byte stream to be scanned
   *
   * @see H264Stream.push
   */
  this.push = function (data) {
    var swapBuffer;

    // append the new bytes to anything left over from the previous push
    if (!buffer) {
      buffer = data.data;
    } else {
      swapBuffer = new Uint8Array(buffer.byteLength + data.data.byteLength);
      swapBuffer.set(buffer);
      swapBuffer.set(data.data, buffer.byteLength);
      buffer = swapBuffer;
    }
    var len = buffer.byteLength;

    // Rec. ITU-T H.264, Annex B
    // scan for NAL unit boundaries

    // a match looks like this:
    // 0 0 1 .. NAL .. 0 0 1
    // ^ sync point ^ i
    // or this:
    // 0 0 1 .. NAL .. 0 0 0
    // ^ sync point ^ i

    // advance the sync point to a NAL start, if necessary
    for (; syncPoint < len - 3; syncPoint++) {
      if (buffer[syncPoint + 2] === 1) {
        // the sync point is properly aligned
        i = syncPoint + 5;
        break;
      }
    }

    // `i` strides three bytes at a time: a start code cannot be missed
    // because every 0x000001 / 0x000000 sequence contains at least one of
    // its bytes at any 3-byte phase
    while (i < len) {
      // look at the current byte to determine if we've hit the end of
      // a NAL unit boundary
      switch (buffer[i]) {
        case 0:
          // skip past non-sync sequences
          if (buffer[i - 1] !== 0) {
            i += 2;
            break;
          } else if (buffer[i - 2] !== 0) {
            i++;
            break;
          }

          // deliver the NAL unit if it isn't empty
          if (syncPoint + 3 !== i - 2) {
            this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
          }

          // drop trailing zeroes
          do {
            i++;
          } while (buffer[i] !== 1 && i < len);
          syncPoint = i - 2;
          i += 3;
          break;
        case 1:
          // skip past non-sync sequences
          if (buffer[i - 1] !== 0 || buffer[i - 2] !== 0) {
            i += 3;
            break;
          }

          // deliver the NAL unit
          this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
          syncPoint = i - 2;
          i += 3;
          break;
        default:
          // the current byte isn't a one or zero, so it cannot be part
          // of a sync sequence
          i += 3;
          break;
      }
    }
    // filter out the NAL units that were delivered
    buffer = buffer.subarray(syncPoint);
    i -= syncPoint;
    syncPoint = 0;
  };

  // Discard any partially-accumulated NAL unit (seek/discontinuity).
  this.reset = function () {
    buffer = null;
    syncPoint = 0;
    this.trigger('reset');
  };

  this.flush = function () {
    // deliver the last buffered NAL unit
    if (buffer && buffer.byteLength > 3) {
      this.trigger('data', buffer.subarray(syncPoint + 3));
    }
    // reset the stream state
    buffer = null;
    syncPoint = 0;
    this.trigger('done');
  };

  this.endTimeline = function () {
    this.flush();
    this.trigger('endedtimeline');
  };
};
_NalByteStream.prototype = new stream();
6169
// values of profile_idc that indicate additional fields (chroma format, bit
// depths, optional scaling matrices) are included in the SPS
// see Recommendation ITU-T H.264 (4/2013),
// 7.3.2.1.1 Sequence parameter set data syntax
PROFILES_WITH_OPTIONAL_SPS_DATA = {
  44: true,
  83: true,
  86: true,
  100: true,
  110: true,
  118: true,
  122: true,
  128: true,
  134: true,
  138: true,
  139: true,
  244: true
};
6187
6188 /**
6189 * Accepts input from a ElementaryStream and produces H.264 NAL unit data
6190 * events.
6191 */
6192 _H264Stream = function H264Stream() {
6193 var nalByteStream = new _NalByteStream(),
6194 self,
6195 trackId,
6196 currentPts,
6197 currentDts,
6198 discardEmulationPreventionBytes,
6199 readSequenceParameterSet,
6200 skipScalingList;
6201
6202 _H264Stream.prototype.init.call(this);
6203 self = this;
6204
6205 /*
6206 * Pushes a packet from a stream onto the NalByteStream
6207 *
6208 * @param {Object} packet - A packet received from a stream
6209 * @param {Uint8Array} packet.data - The raw bytes of the packet
6210 * @param {Number} packet.dts - Decode timestamp of the packet
6211 * @param {Number} packet.pts - Presentation timestamp of the packet
6212 * @param {Number} packet.trackId - The id of the h264 track this packet came from
6213 * @param {('video'|'audio')} packet.type - The type of packet
6214 *
6215 */
6216 this.push = function (packet) {
6217 if (packet.type !== 'video') {
6218 return;
6219 }
6220 trackId = packet.trackId;
6221 currentPts = packet.pts;
6222 currentDts = packet.dts;
6223
6224 nalByteStream.push(packet);
6225 };
6226
6227 /*
6228 * Identify NAL unit types and pass on the NALU, trackId, presentation and decode timestamps
6229 * for the NALUs to the next stream component.
6230 * Also, preprocess caption and sequence parameter NALUs.
6231 *
6232 * @param {Uint8Array} data - A NAL unit identified by `NalByteStream.push`
6233 * @see NalByteStream.push
6234 */
6235 nalByteStream.on('data', function (data) {
6236 var event = {
6237 trackId: trackId,
6238 pts: currentPts,
6239 dts: currentDts,
6240 data: data
6241 };
6242
6243 switch (data[0] & 0x1f) {
6244 case 0x05:
6245 event.nalUnitType = 'slice_layer_without_partitioning_rbsp_idr';
6246 break;
6247 case 0x06:
6248 event.nalUnitType = 'sei_rbsp';
6249 event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
6250 break;
6251 case 0x07:
6252 event.nalUnitType = 'seq_parameter_set_rbsp';
6253 event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
6254 event.config = readSequenceParameterSet(event.escapedRBSP);
6255 break;
6256 case 0x08:
6257 event.nalUnitType = 'pic_parameter_set_rbsp';
6258 break;
6259 case 0x09:
6260 event.nalUnitType = 'access_unit_delimiter_rbsp';
6261 break;
6262
6263 default:
6264 break;
6265 }
6266 // This triggers data on the H264Stream
6267 self.trigger('data', event);
6268 });
6269 nalByteStream.on('done', function () {
6270 self.trigger('done');
6271 });
6272 nalByteStream.on('partialdone', function () {
6273 self.trigger('partialdone');
6274 });
6275 nalByteStream.on('reset', function () {
6276 self.trigger('reset');
6277 });
6278 nalByteStream.on('endedtimeline', function () {
6279 self.trigger('endedtimeline');
6280 });
6281
6282 this.flush = function () {
6283 nalByteStream.flush();
6284 };
6285
6286 this.partialFlush = function () {
6287 nalByteStream.partialFlush();
6288 };
6289
6290 this.reset = function () {
6291 nalByteStream.reset();
6292 };
6293
6294 this.endTimeline = function () {
6295 nalByteStream.endTimeline();
6296 };
6297
6298 /**
6299 * Advance the ExpGolomb decoder past a scaling list. The scaling
6300 * list is optionally transmitted as part of a sequence parameter
6301 * set and is not relevant to transmuxing.
6302 * @param count {number} the number of entries in this scaling list
6303 * @param expGolombDecoder {object} an ExpGolomb pointed to the
6304 * start of a scaling list
6305 * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
6306 */
6307 skipScalingList = function skipScalingList(count, expGolombDecoder) {
6308 var lastScale = 8,
6309 nextScale = 8,
6310 j,
6311 deltaScale;
6312
6313 for (j = 0; j < count; j++) {
6314 if (nextScale !== 0) {
6315 deltaScale = expGolombDecoder.readExpGolomb();
6316 nextScale = (lastScale + deltaScale + 256) % 256;
6317 }
6318
6319 lastScale = nextScale === 0 ? lastScale : nextScale;
6320 }
6321 };
6322
6323 /**
6324 * Expunge any "Emulation Prevention" bytes from a "Raw Byte
6325 * Sequence Payload"
6326 * @param data {Uint8Array} the bytes of a RBSP from a NAL
6327 * unit
6328 * @return {Uint8Array} the RBSP without any Emulation
6329 * Prevention Bytes
6330 */
6331 discardEmulationPreventionBytes = function discardEmulationPreventionBytes(data) {
6332 var length = data.byteLength,
6333 emulationPreventionBytesPositions = [],
6334 i = 1,
6335 newLength,
6336 newData;
6337
6338 // Find all `Emulation Prevention Bytes`
6339 while (i < length - 2) {
6340 if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
6341 emulationPreventionBytesPositions.push(i + 2);
6342 i += 2;
6343 } else {
6344 i++;
6345 }
6346 }
6347
6348 // If no Emulation Prevention Bytes were found just return the original
6349 // array
6350 if (emulationPreventionBytesPositions.length === 0) {
6351 return data;
6352 }
6353
6354 // Create a new array to hold the NAL unit data
6355 newLength = length - emulationPreventionBytesPositions.length;
6356 newData = new Uint8Array(newLength);
6357 var sourceIndex = 0;
6358
6359 for (i = 0; i < newLength; sourceIndex++, i++) {
6360 if (sourceIndex === emulationPreventionBytesPositions[0]) {
6361 // Skip this byte
6362 sourceIndex++;
6363 // Remove this position index
6364 emulationPreventionBytesPositions.shift();
6365 }
6366 newData[i] = data[sourceIndex];
6367 }
6368
6369 return newData;
6370 };
6371
6372 /**
6373 * Read a sequence parameter set and return some interesting video
6374 * properties. A sequence parameter set is the H264 metadata that
6375 * describes the properties of upcoming video frames.
6376 * @param data {Uint8Array} the bytes of a sequence parameter set
6377 * @return {object} an object with configuration parsed from the
6378 * sequence parameter set, including the dimensions of the
6379 * associated video frames.
6380 */
6381 readSequenceParameterSet = function readSequenceParameterSet(data) {
6382 var frameCropLeftOffset = 0,
6383 frameCropRightOffset = 0,
6384 frameCropTopOffset = 0,
6385 frameCropBottomOffset = 0,
6386 sarScale = 1,
6387 expGolombDecoder,
6388 profileIdc,
6389 levelIdc,
6390 profileCompatibility,
6391 chromaFormatIdc,
6392 picOrderCntType,
6393 numRefFramesInPicOrderCntCycle,
6394 picWidthInMbsMinus1,
6395 picHeightInMapUnitsMinus1,
6396 frameMbsOnlyFlag,
6397 scalingListCount,
6398 sarRatio,
6399 aspectRatioIdc,
6400 i;
6401
6402 expGolombDecoder = new expGolomb(data);
6403 profileIdc = expGolombDecoder.readUnsignedByte(); // profile_idc
6404 profileCompatibility = expGolombDecoder.readUnsignedByte(); // constraint_set[0-5]_flag
6405 levelIdc = expGolombDecoder.readUnsignedByte(); // level_idc u(8)
6406 expGolombDecoder.skipUnsignedExpGolomb(); // seq_parameter_set_id
6407
6408 // some profiles have more optional data we don't need
6409 if (PROFILES_WITH_OPTIONAL_SPS_DATA[profileIdc]) {
6410 chromaFormatIdc = expGolombDecoder.readUnsignedExpGolomb();
6411 if (chromaFormatIdc === 3) {
6412 expGolombDecoder.skipBits(1); // separate_colour_plane_flag
6413 }
6414 expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_luma_minus8
6415 expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_chroma_minus8
6416 expGolombDecoder.skipBits(1); // qpprime_y_zero_transform_bypass_flag
6417 if (expGolombDecoder.readBoolean()) {
6418 // seq_scaling_matrix_present_flag
6419 scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
6420 for (i = 0; i < scalingListCount; i++) {
6421 if (expGolombDecoder.readBoolean()) {
6422 // seq_scaling_list_present_flag[ i ]
6423 if (i < 6) {
6424 skipScalingList(16, expGolombDecoder);
6425 } else {
6426 skipScalingList(64, expGolombDecoder);
6427 }
6428 }
6429 }
6430 }
6431 }
6432
6433 expGolombDecoder.skipUnsignedExpGolomb(); // log2_max_frame_num_minus4
6434 picOrderCntType = expGolombDecoder.readUnsignedExpGolomb();
6435
6436 if (picOrderCntType === 0) {
6437 expGolombDecoder.readUnsignedExpGolomb(); // log2_max_pic_order_cnt_lsb_minus4
6438 } else if (picOrderCntType === 1) {
6439 expGolombDecoder.skipBits(1); // delta_pic_order_always_zero_flag
6440 expGolombDecoder.skipExpGolomb(); // offset_for_non_ref_pic
6441 expGolombDecoder.skipExpGolomb(); // offset_for_top_to_bottom_field
6442 numRefFramesInPicOrderCntCycle = expGolombDecoder.readUnsignedExpGolomb();
6443 for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
6444 expGolombDecoder.skipExpGolomb(); // offset_for_ref_frame[ i ]
6445 }
6446 }
6447
6448 expGolombDecoder.skipUnsignedExpGolomb(); // max_num_ref_frames
6449 expGolombDecoder.skipBits(1); // gaps_in_frame_num_value_allowed_flag
6450
6451 picWidthInMbsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
6452 picHeightInMapUnitsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
6453
6454 frameMbsOnlyFlag = expGolombDecoder.readBits(1);
6455 if (frameMbsOnlyFlag === 0) {
6456 expGolombDecoder.skipBits(1); // mb_adaptive_frame_field_flag
6457 }
6458
6459 expGolombDecoder.skipBits(1); // direct_8x8_inference_flag
6460 if (expGolombDecoder.readBoolean()) {
6461 // frame_cropping_flag
6462 frameCropLeftOffset = expGolombDecoder.readUnsignedExpGolomb();
6463 frameCropRightOffset = expGolombDecoder.readUnsignedExpGolomb();
6464 frameCropTopOffset = expGolombDecoder.readUnsignedExpGolomb();
6465 frameCropBottomOffset = expGolombDecoder.readUnsignedExpGolomb();
6466 }
6467 if (expGolombDecoder.readBoolean()) {
6468 // vui_parameters_present_flag
6469 if (expGolombDecoder.readBoolean()) {
6470 // aspect_ratio_info_present_flag
6471 aspectRatioIdc = expGolombDecoder.readUnsignedByte();
6472 switch (aspectRatioIdc) {
6473 case 1:
6474 sarRatio = [1, 1];break;
6475 case 2:
6476 sarRatio = [12, 11];break;
6477 case 3:
6478 sarRatio = [10, 11];break;
6479 case 4:
6480 sarRatio = [16, 11];break;
6481 case 5:
6482 sarRatio = [40, 33];break;
6483 case 6:
6484 sarRatio = [24, 11];break;
6485 case 7:
6486 sarRatio = [20, 11];break;
6487 case 8:
6488 sarRatio = [32, 11];break;
6489 case 9:
6490 sarRatio = [80, 33];break;
6491 case 10:
6492 sarRatio = [18, 11];break;
6493 case 11:
6494 sarRatio = [15, 11];break;
6495 case 12:
6496 sarRatio = [64, 33];break;
6497 case 13:
6498 sarRatio = [160, 99];break;
6499 case 14:
6500 sarRatio = [4, 3];break;
6501 case 15:
6502 sarRatio = [3, 2];break;
6503 case 16:
6504 sarRatio = [2, 1];break;
6505 case 255:
6506 {
6507 sarRatio = [expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte(), expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte()];
6508 break;
6509 }
6510 }
6511 if (sarRatio) {
6512 sarScale = sarRatio[0] / sarRatio[1];
6513 }
6514 }
6515 }
6516 return {
6517 profileIdc: profileIdc,
6518 levelIdc: levelIdc,
6519 profileCompatibility: profileCompatibility,
6520 width: Math.ceil(((picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2) * sarScale),
6521 height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - frameCropTopOffset * 2 - frameCropBottomOffset * 2,
6522 sarRatio: sarRatio
6523 };
6524 };
6525 };
6526 _H264Stream.prototype = new stream();
6527
6528 var h264 = {
6529 H264Stream: _H264Stream,
6530 NalByteStream: _NalByteStream
6531 };
6532
6533 /**
6534 * mux.js
6535 *
6536 * Copyright (c) Brightcove
6537 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
6538 *
6539 * Utilities to detect basic properties and metadata about Aac data.
6540 */
6541
// Sample rates (Hz) indexed by the ADTS sampling_frequency_index field
// (ISO/IEC 14496-3); duplicated from the adts module for the AAC probe helpers
var ADTS_SAMPLING_FREQUENCIES$1 = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
6543
// Returns true when the byte stream begins with the "ID3" magic bytes, which
// is how raw AAC segments (ADTS with ID3 timed metadata) start.
var isLikelyAacData = function isLikelyAacData(data) {
  return data[0] === 'I'.charCodeAt(0) && data[1] === 'D'.charCodeAt(0) && data[2] === '3'.charCodeAt(0);
};
6550
// Decode a 28-bit ID3 "syncsafe" integer: four bytes, seven payload bits each.
var parseSyncSafeInteger$1 = function parseSyncSafeInteger(data) {
  var value = 0;
  for (var idx = 0; idx < 4; idx++) {
    value = value << 7 | data[idx];
  }
  return value;
};
6554
// return a percent-encoded representation of the specified byte range
// @see http://en.wikipedia.org/wiki/Percent-encoding
var percentEncode$1 = function percentEncode(bytes, start, end) {
  var encoded = [];
  for (var idx = start; idx < end; idx++) {
    // each byte becomes '%' plus its two-digit lowercase hex value
    encoded.push('%' + ('00' + bytes[idx].toString(16)).slice(-2));
  }
  return encoded.join('');
};

// return the string representation of the specified byte range,
// interpreted as ISO-8859-1.
var parseIso88591$1 = function parseIso88591(bytes, start, end) {
  return unescape(percentEncode$1(bytes, start, end)); // jshint ignore:line
};
6571
// Total byte length of the ID3 tag starting at byteIndex: the syncsafe size
// field plus the 10-byte header, plus a 10-byte footer when flagged.
var parseId3TagSize = function parseId3TagSize(header, byteIndex) {
  var flags = header[byteIndex + 5];
  var returnSize = header[byteIndex + 6] << 21 | header[byteIndex + 7] << 14 | header[byteIndex + 8] << 7 | header[byteIndex + 9];
  var footerPresent = (flags & 16) >> 4;

  return footerPresent ? returnSize + 20 : returnSize + 10;
};
6582
/**
 * Parse the 13-bit frame_length field of the ADTS header starting at
 * byteIndex: the low two bits of byte 3, all of byte 4, and the high three
 * bits of byte 5.
 *
 * Fix: the original computed `header[byteIndex + 3] & 0x3 << 11`, which
 * JavaScript parses as `header[...] & (0x3 << 11)`; a byte is always < 256,
 * so the two high bits were masked to zero and any frame of 2048 bytes or
 * more was mis-sized. Parenthesized to match the equivalent (correct)
 * frameLength parsing in AdtsStream.push.
 */
var parseAdtsSize = function parseAdtsSize(header, byteIndex) {
  var lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
      middle = header[byteIndex + 4] << 3,
      highTwo = (header[byteIndex + 3] & 0x3) << 11;

  return highTwo | middle | lowThree;
};
6590
/**
 * Identify the payload type at byteIndex: 'timed-metadata' for an ID3 tag,
 * 'audio' for an ADTS sync word (0xFFFx), or null when neither matches.
 *
 * Fix: the original tested `header[byteIndex] & 0xff === 0xff`, which
 * JavaScript parses as `header[byteIndex] & (0xff === 0xff)` — i.e. just the
 * low bit of the byte — so odd non-sync bytes were misreported as audio and
 * even bytes could never match. Parenthesized to match the correct sync-word
 * check in AacStream.push.
 */
var parseType = function parseType(header, byteIndex) {
  if (header[byteIndex] === 'I'.charCodeAt(0) && header[byteIndex + 1] === 'D'.charCodeAt(0) && header[byteIndex + 2] === '3'.charCodeAt(0)) {
    return 'timed-metadata';
  } else if ((header[byteIndex] & 0xff) === 0xff && (header[byteIndex + 1] & 0xf0) === 0xf0) {
    return 'audio';
  }
  return null;
};
6599
// Scan for the first ADTS sync word in the packet and return the sample rate
// (Hz) encoded in its header, or null when no ADTS header is found.
var parseSampleRate = function parseSampleRate(packet) {
  var byteIndex = 0;

  while (byteIndex + 5 < packet.length) {
    if (packet[byteIndex] === 0xFF && (packet[byteIndex + 1] & 0xF6) === 0xF0) {
      // sync word found; sampling_frequency_index is bits 2-5 of the third
      // header byte
      return ADTS_SAMPLING_FREQUENCIES$1[(packet[byteIndex + 2] & 0x3c) >>> 2];
    }
    // no header here; resume the search at the next byte
    byteIndex++;
  }

  return null;
};
6615
// Walk the frames of an ID3v2 tag looking for a 'PRIV' frame owned by
// 'com.apple.streaming.transportStreamTimestamp' and return the 33-bit
// MPEG-TS timestamp it carries, or null when none is present.
var parseAacTimestamp = function parseAacTimestamp(packet) {
  var frameStart, frameSize, frame, frameHeader;

  // find the start of the first frame and the end of the tag
  frameStart = 10;
  if (packet[5] & 0x40) {
    // advance the frame start past the extended header
    frameStart += 4; // header size field
    frameStart += parseSyncSafeInteger$1(packet.subarray(10, 14));
  }

  // parse one or more ID3 frames
  // http://id3.org/id3v2.3.0#ID3v2_frame_overview
  do {
    // determine the number of bytes in this frame
    frameSize = parseSyncSafeInteger$1(packet.subarray(frameStart + 4, frameStart + 8));
    if (frameSize < 1) {
      return null;
    }
    frameHeader = String.fromCharCode(packet[frameStart], packet[frameStart + 1], packet[frameStart + 2], packet[frameStart + 3]);

    if (frameHeader === 'PRIV') {
      frame = packet.subarray(frameStart + 10, frameStart + frameSize + 10);

      // a PRIV payload is a NUL-terminated owner string followed by the data
      for (var i = 0; i < frame.byteLength; i++) {
        if (frame[i] === 0) {
          var owner = parseIso88591$1(frame, 0, i);
          if (owner === 'com.apple.streaming.transportStreamTimestamp') {
            var d = frame.subarray(i + 1);
            // reassemble the 33-bit PTS: 31 high bits, then the low two bits
            // of the final byte
            var size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
            size *= 4;
            size += d[7] & 0x03;

            return size;
          }
          break;
        }
      }
    }

    frameStart += 10; // advance past the frame header
    frameStart += frameSize; // advance past the frame body
  } while (frameStart < packet.byteLength);
  return null;
};
6661
// Helpers for probing raw AAC byte streams (ADTS frames plus ID3 metadata)
var utils = {
  isLikelyAacData: isLikelyAacData,
  parseId3TagSize: parseId3TagSize,
  parseAdtsSize: parseAdtsSize,
  parseType: parseType,
  parseSampleRate: parseSampleRate,
  parseAacTimestamp: parseAacTimestamp
};
6670
// Constants
var _AacStream;

/**
 * Splits an incoming stream of binary data into ADTS and ID3 Frames.
 */

_AacStream = function AacStream() {
  var everything = new Uint8Array(),
      timeStamp = 0;

  _AacStream.prototype.init.call(this);

  // Timestamp (90kHz ticks) stamped onto the 'audio' packets emitted by push()
  this.setTimestamp = function (timestamp) {
    timeStamp = timestamp;
  };

  /**
   * Scan the accumulated bytes and emit one 'data' event per complete ID3 tag
   * ('timed-metadata') or ADTS frame ('audio'). Incomplete trailing bytes are
   * retained for the next push.
   *
   * @param {Uint8Array} bytes - the next chunk of the raw AAC byte stream
   */
  this.push = function (bytes) {
    var frameSize = 0,
        byteIndex = 0,
        bytesLeft,
        chunk,
        packet,
        oldEverything;

    // If there are bytes remaining from the last segment, prepend them to the
    // bytes that were pushed in.
    //
    // Bug fix: the original allocated the new buffer first and then copied
    // from `everything.subarray(0, tempLength)` — but at that point
    // `everything` already referred to the freshly-allocated, zero-filled
    // array, so the carried-over bytes were silently replaced with zeroes.
    // Keep a reference to the old buffer and copy from it instead.
    if (everything.length) {
      oldEverything = everything;
      everything = new Uint8Array(bytes.byteLength + oldEverything.length);
      everything.set(oldEverything);
      everything.set(bytes, oldEverything.length);
    } else {
      everything = bytes;
    }

    while (everything.length - byteIndex >= 3) {
      if (everything[byteIndex] === 'I'.charCodeAt(0) && everything[byteIndex + 1] === 'D'.charCodeAt(0) && everything[byteIndex + 2] === '3'.charCodeAt(0)) {

        // Exit early because we don't have enough to parse
        // the ID3 tag header
        if (everything.length - byteIndex < 10) {
          break;
        }

        // check framesize
        frameSize = utils.parseId3TagSize(everything, byteIndex);

        // Exit early if we don't have enough in the buffer
        // to emit a full packet
        // Add to byteIndex to support multiple ID3 tags in sequence
        if (byteIndex + frameSize > everything.length) {
          break;
        }
        chunk = {
          type: 'timed-metadata',
          data: everything.subarray(byteIndex, byteIndex + frameSize)
        };
        this.trigger('data', chunk);
        byteIndex += frameSize;
        continue;
      } else if ((everything[byteIndex] & 0xff) === 0xff && (everything[byteIndex + 1] & 0xf0) === 0xf0) {

        // Exit early because we don't have enough to parse
        // the ADTS frame header
        if (everything.length - byteIndex < 7) {
          break;
        }

        frameSize = utils.parseAdtsSize(everything, byteIndex);

        // Exit early if we don't have enough in the buffer
        // to emit a full packet
        if (byteIndex + frameSize > everything.length) {
          break;
        }

        packet = {
          type: 'audio',
          data: everything.subarray(byteIndex, byteIndex + frameSize),
          pts: timeStamp,
          dts: timeStamp
        };
        this.trigger('data', packet);
        byteIndex += frameSize;
        continue;
      }
      byteIndex++;
    }
    bytesLeft = everything.length - byteIndex;

    // retain unconsumed bytes for the next push
    if (bytesLeft > 0) {
      everything = everything.subarray(byteIndex);
    } else {
      everything = new Uint8Array();
    }
  };

  // Drop all buffered bytes (e.g. on seek or discontinuity).
  this.reset = function () {
    everything = new Uint8Array();
    this.trigger('reset');
  };

  // Drop buffered bytes and signal the end of the current timeline.
  this.endTimeline = function () {
    everything = new Uint8Array();
    this.trigger('endedtimeline');
  };
};

_AacStream.prototype = new stream();

var aac = _AacStream;
6783
// constants
// ADTS frame properties copied onto the audio track by AudioSegmentStream.push
var AUDIO_PROPERTIES = ['audioobjecttype', 'channelcount', 'samplerate', 'samplingfrequencyindex', 'samplesize'];

var audioProperties = AUDIO_PROPERTIES;

// SPS-derived property names shared with the video segment stream
var VIDEO_PROPERTIES = ['width', 'height', 'profileIdc', 'levelIdc', 'profileCompatibility', 'sarRatio'];

var videoProperties = VIDEO_PROPERTIES;

var H264Stream = h264.H264Stream;

var isLikelyAacData$1 = utils.isLikelyAacData;
var ONE_SECOND_IN_TS$3 = clock.ONE_SECOND_IN_TS; // MPEG-TS 90kHz clock ticks per second

// object types
var _VideoSegmentStream, _AudioSegmentStream, _Transmuxer, _CoalesceStream;
6800
6801 /**
6802 * Compare two arrays (even typed) for same-ness
6803 */
6804 var arrayEquals = function arrayEquals(a, b) {
6805 var i;
6806
6807 if (a.length !== b.length) {
6808 return false;
6809 }
6810
6811 // compare the value of each element in the array
6812 for (i = 0; i < a.length; i++) {
6813 if (a[i] !== b[i]) {
6814 return false;
6815 }
6816 }
6817
6818 return true;
6819 };
6820
// Describe a transmuxed video segment's timing in player-time terms. The PTS
// and DTS values are based on the actual stream times from the segment, but
// player times start from baseMediaDecodeTime, so the start/end values are
// rebased onto it using the segment's DTS and PTS spans.
var generateVideoSegmentTimingInfo = function generateVideoSegmentTimingInfo(baseMediaDecodeTime, startDts, startPts, endDts, endPts, prependedContentDuration) {
  // offset of presentation time from decode time at the segment start
  var startPtsOffset = startPts - startDts;

  return {
    start: {
      dts: baseMediaDecodeTime,
      pts: baseMediaDecodeTime + startPtsOffset
    },
    end: {
      dts: baseMediaDecodeTime + (endDts - startDts),
      pts: baseMediaDecodeTime + (endPts - startPts)
    },
    prependedContentDuration: prependedContentDuration,
    baseMediaDecodeTime: baseMediaDecodeTime
  };
};
6843
6844 /**
6845 * Constructs a single-track, ISO BMFF media segment from AAC data
6846 * events. The output of this stream can be fed to a SourceBuffer
6847 * configured with a suitable initialization segment.
6848 * @param track {object} track metadata configuration
6849 * @param options {object} transmuxer options object
6850 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
6851 * in the source; false to adjust the first segment to start at 0.
6852 */
_AudioSegmentStream = function AudioSegmentStream(track, options) {
  // buffered ADTS frames plus the timing state needed to assemble them
  // into a fragment at flush() time
  var adtsFrames = [],
      sequenceNumber = 0,
      earliestAllowedDts = 0,
      audioAppendStartTs = 0,
      videoBaseMediaDecodeTime = Infinity;

  options = options || {};

  _AudioSegmentStream.prototype.init.call(this);

  // Buffer an incoming ADTS frame and record its DTS/PTS range on the track
  this.push = function (data) {
    trackDecodeInfo.collectDtsInfo(track, data);

    if (track) {
      // mirror the codec configuration of the most recent frame onto the track
      audioProperties.forEach(function (prop) {
        track[prop] = data[prop];
      });
    }

    // buffer audio data until end() is called
    adtsFrames.push(data);
  };

  // Frames starting before this DTS are trimmed during flush()
  this.setEarliestDts = function (earliestDts) {
    earliestAllowedDts = earliestDts;
  };

  // Video timing takes precedence; used by prefixWithSilence() during flush()
  this.setVideoBaseMediaDecodeTime = function (baseMediaDecodeTime) {
    videoBaseMediaDecodeTime = baseMediaDecodeTime;
  };

  this.setAudioAppendStart = function (timestamp) {
    audioAppendStartTs = timestamp;
  };

  // Assemble the buffered frames into a single moof+mdat pair, emit it as a
  // 'data' event, then signal 'done'
  this.flush = function () {
    var frames, moof, mdat, boxes, frameDuration;

    // return early if no audio data has been observed
    if (adtsFrames.length === 0) {
      this.trigger('done', 'AudioSegmentStream');
      return;
    }

    frames = audioFrameUtils.trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts);
    track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps);

    audioFrameUtils.prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime);

    // we have to build the index from byte locations to
    // samples (that is, adts frames) in the audio data
    track.samples = audioFrameUtils.generateSampleTable(frames);

    // concatenate the audio data to construct the mdat
    mdat = mp4Generator.mdat(audioFrameUtils.concatenateFrameData(frames));

    adtsFrames = [];

    moof = mp4Generator.moof(sequenceNumber, [track]);
    boxes = new Uint8Array(moof.byteLength + mdat.byteLength);

    // bump the sequence number for next time
    sequenceNumber++;

    boxes.set(moof);
    boxes.set(mdat, moof.byteLength);

    trackDecodeInfo.clearDtsInfo(track);

    // duration of one AAC frame (1024 samples) in clock ticks
    frameDuration = Math.ceil(ONE_SECOND_IN_TS$3 * 1024 / track.samplerate);

    // TODO this check was added to maintain backwards compatibility (particularly with
    // tests) on adding the timingInfo event. However, it seems unlikely that there's a
    // valid use-case where an init segment/data should be triggered without associated
    // frames. Leaving for now, but should be looked into.
    if (frames.length) {
      this.trigger('timingInfo', {
        start: frames[0].pts,
        end: frames[0].pts + frames.length * frameDuration
      });
    }
    this.trigger('data', { track: track, boxes: boxes });
    this.trigger('done', 'AudioSegmentStream');
  };

  // Discard any buffered frames and clear accumulated DTS info
  this.reset = function () {
    trackDecodeInfo.clearDtsInfo(track);
    adtsFrames = [];
    this.trigger('reset');
  };
};

_AudioSegmentStream.prototype = new stream();
6947
6948 /**
6949 * Constructs a single-track, ISO BMFF media segment from H264 data
6950 * events. The output of this stream can be fed to a SourceBuffer
6951 * configured with a suitable initialization segment.
6952 * @param track {object} track metadata configuration
6953 * @param options {object} transmuxer options object
6954 * @param options.alignGopsAtEnd {boolean} If true, start from the end of the
6955 * gopsToAlignWith list when attempting to align gop pts
6956 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
6957 * in the source; false to adjust the first segment to start at 0.
6958 */
_VideoSegmentStream = function VideoSegmentStream(track, options) {
  var sequenceNumber = 0,
      nalUnits = [],
      gopsToAlignWith = [],
      config,
      pps;

  options = options || {};

  _VideoSegmentStream.prototype.init.call(this);

  delete track.minPTS;

  this.gopCache_ = [];

  /**
   * Constructs a ISO BMFF segment given H264 nalUnits
   * @param {Object} nalUnit A data event representing a nalUnit
   * @param {String} nalUnit.nalUnitType
   * @param {Object} nalUnit.config Properties for a mp4 track
   * @param {Uint8Array} nalUnit.data The nalUnit bytes
   * @see lib/codecs/h264.js
   **/
  this.push = function (nalUnit) {
    trackDecodeInfo.collectDtsInfo(track, nalUnit);

    // record the track config
    if (nalUnit.nalUnitType === 'seq_parameter_set_rbsp' && !config) {
      config = nalUnit.config;
      track.sps = [nalUnit.data];

      videoProperties.forEach(function (prop) {
        track[prop] = config[prop];
      }, this);
    }

    if (nalUnit.nalUnitType === 'pic_parameter_set_rbsp' && !pps) {
      pps = nalUnit.data;
      track.pps = [nalUnit.data];
    }

    // buffer video until flush() is called
    nalUnits.push(nalUnit);
  };

  /**
   * Pass constructed ISO BMFF track and boxes on to the
   * next stream in the pipeline
   **/
  this.flush = function () {
    var frames,
        gopForFusion,
        gops,
        moof,
        mdat,
        boxes,
        prependedContentDuration = 0,
        firstGop,
        lastGop;

    // Throw away nalUnits at the start of the byte stream until
    // we find the first AUD
    while (nalUnits.length) {
      if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
        break;
      }
      nalUnits.shift();
    }

    // Return early if no video data has been observed
    if (nalUnits.length === 0) {
      this.resetStream_();
      this.trigger('done', 'VideoSegmentStream');
      return;
    }

    // Organize the raw nal-units into arrays that represent
    // higher-level constructs such as frames and gops
    // (group-of-pictures)
    frames = frameUtils.groupNalsIntoFrames(nalUnits);
    gops = frameUtils.groupFramesIntoGops(frames);

    // If the first frame of this fragment is not a keyframe we have
    // a problem since MSE (on Chrome) requires a leading keyframe.
    //
    // We have two approaches to repairing this situation:
    // 1) GOP-FUSION:
    //    This is where we keep track of the GOPS (group-of-pictures)
    //    from previous fragments and attempt to find one that we can
    //    prepend to the current fragment in order to create a valid
    //    fragment.
    // 2) KEYFRAME-PULLING:
    //    Here we search for the first keyframe in the fragment and
    //    throw away all the frames between the start of the fragment
    //    and that keyframe. We then extend the duration and pull the
    //    PTS of the keyframe forward so that it covers the time range
    //    of the frames that were disposed of.
    //
    // #1 is far preferable over #2 which can cause "stuttering" but
    // requires more things to be just right.
    if (!gops[0][0].keyFrame) {
      // Search for a gop for fusion from our gopCache
      // NOTE(review): getGopForFusion_ is declared with a single nalUnit
      // parameter; the second argument here is ignored
      gopForFusion = this.getGopForFusion_(nalUnits[0], track);

      if (gopForFusion) {
        // in order to provide more accurate timing information about the segment, save
        // the number of seconds prepended to the original segment due to GOP fusion
        prependedContentDuration = gopForFusion.duration;

        gops.unshift(gopForFusion);
        // Adjust Gops' metadata to account for the inclusion of the
        // new gop at the beginning
        gops.byteLength += gopForFusion.byteLength;
        gops.nalCount += gopForFusion.nalCount;
        gops.pts = gopForFusion.pts;
        gops.dts = gopForFusion.dts;
        gops.duration += gopForFusion.duration;
      } else {
        // If we didn't find a candidate gop fall back to keyframe-pulling
        gops = frameUtils.extendFirstKeyFrame(gops);
      }
    }

    // Trim gops to align with gopsToAlignWith
    if (gopsToAlignWith.length) {
      var alignedGops;

      if (options.alignGopsAtEnd) {
        alignedGops = this.alignGopsAtEnd_(gops);
      } else {
        alignedGops = this.alignGopsAtStart_(gops);
      }

      if (!alignedGops) {
        // save all the nals in the last GOP into the gop cache
        this.gopCache_.unshift({
          gop: gops.pop(),
          pps: track.pps,
          sps: track.sps
        });

        // Keep a maximum of 6 GOPs in the cache
        this.gopCache_.length = Math.min(6, this.gopCache_.length);

        // Clear nalUnits
        nalUnits = [];

        // return early no gops can be aligned with desired gopsToAlignWith
        this.resetStream_();
        this.trigger('done', 'VideoSegmentStream');
        return;
      }

      // Some gops were trimmed. clear dts info so minSegmentDts and pts are correct
      // when recalculated before sending off to CoalesceStream
      trackDecodeInfo.clearDtsInfo(track);

      gops = alignedGops;
    }

    trackDecodeInfo.collectDtsInfo(track, gops);

    // First, we have to build the index from byte locations to
    // samples (that is, frames) in the video data
    track.samples = frameUtils.generateSampleTable(gops);

    // Concatenate the video data and construct the mdat
    mdat = mp4Generator.mdat(frameUtils.concatenateNalData(gops));

    track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps);

    this.trigger('processedGopsInfo', gops.map(function (gop) {
      return {
        pts: gop.pts,
        dts: gop.dts,
        byteLength: gop.byteLength
      };
    }));

    firstGop = gops[0];
    lastGop = gops[gops.length - 1];

    this.trigger('segmentTimingInfo', generateVideoSegmentTimingInfo(track.baseMediaDecodeTime, firstGop.dts, firstGop.pts, lastGop.dts + lastGop.duration, lastGop.pts + lastGop.duration, prependedContentDuration));

    this.trigger('timingInfo', {
      start: gops[0].pts,
      end: gops[gops.length - 1].pts + gops[gops.length - 1].duration
    });

    // save all the nals in the last GOP into the gop cache
    this.gopCache_.unshift({
      gop: gops.pop(),
      pps: track.pps,
      sps: track.sps
    });

    // Keep a maximum of 6 GOPs in the cache
    this.gopCache_.length = Math.min(6, this.gopCache_.length);

    // Clear nalUnits
    nalUnits = [];

    this.trigger('baseMediaDecodeTime', track.baseMediaDecodeTime);
    this.trigger('timelineStartInfo', track.timelineStartInfo);

    moof = mp4Generator.moof(sequenceNumber, [track]);

    // it would be great to allocate this array up front instead of
    // throwing away hundreds of media segment fragments
    boxes = new Uint8Array(moof.byteLength + mdat.byteLength);

    // Bump the sequence number for next time
    sequenceNumber++;

    boxes.set(moof);
    boxes.set(mdat, moof.byteLength);

    this.trigger('data', { track: track, boxes: boxes });

    this.resetStream_();

    // Continue with the flush process now
    this.trigger('done', 'VideoSegmentStream');
  };

  // Discard all buffered nal units, cached GOPs, and alignment candidates
  this.reset = function () {
    this.resetStream_();
    nalUnits = [];
    this.gopCache_.length = 0;
    gopsToAlignWith.length = 0;
    this.trigger('reset');
  };

  this.resetStream_ = function () {
    trackDecodeInfo.clearDtsInfo(track);

    // reset config and pps because they may differ across segments
    // for instance, when we are rendition switching
    config = undefined;
    pps = undefined;
  };

  // Search for a candidate Gop for gop-fusion from the gop cache and
  // return it or return null if no good candidate was found
  this.getGopForFusion_ = function (nalUnit) {
    var halfSecond = 45000,
        // Half-a-second in a 90khz clock
        allowableOverlap = 10000,
        // About 3 frames @ 30fps
        nearestDistance = Infinity,
        dtsDistance,
        nearestGopObj,
        currentGop,
        currentGopObj,
        i;

    // Search for the GOP nearest to the beginning of this nal unit
    for (i = 0; i < this.gopCache_.length; i++) {
      currentGopObj = this.gopCache_[i];
      currentGop = currentGopObj.gop;

      // Reject Gops with different SPS or PPS
      if (!(track.pps && arrayEquals(track.pps[0], currentGopObj.pps[0])) || !(track.sps && arrayEquals(track.sps[0], currentGopObj.sps[0]))) {
        continue;
      }

      // Reject Gops that would require a negative baseMediaDecodeTime
      if (currentGop.dts < track.timelineStartInfo.dts) {
        continue;
      }

      // The distance between the end of the gop and the start of the nalUnit
      dtsDistance = nalUnit.dts - currentGop.dts - currentGop.duration;

      // Only consider GOPS that start before the nal unit and end within
      // a half-second of the nal unit
      if (dtsDistance >= -allowableOverlap && dtsDistance <= halfSecond) {

        // Always use the closest GOP we found if there is more than
        // one candidate
        if (!nearestGopObj || nearestDistance > dtsDistance) {
          nearestGopObj = currentGopObj;
          nearestDistance = dtsDistance;
        }
      }
    }

    if (nearestGopObj) {
      return nearestGopObj.gop;
    }
    return null;
  };

  // trim gop list to the first gop found that has a matching pts with a gop in the list
  // of gopsToAlignWith starting from the START of the list
  this.alignGopsAtStart_ = function (gops) {
    var alignIndex, gopIndex, align, gop, byteLength, nalCount, duration, alignedGops;

    // running totals, reduced as leading gops are trimmed away
    byteLength = gops.byteLength;
    nalCount = gops.nalCount;
    duration = gops.duration;
    alignIndex = gopIndex = 0;

    while (alignIndex < gopsToAlignWith.length && gopIndex < gops.length) {
      align = gopsToAlignWith[alignIndex];
      gop = gops[gopIndex];

      if (align.pts === gop.pts) {
        break;
      }

      if (gop.pts > align.pts) {
        // this current gop starts after the current gop we want to align on, so increment
        // align index
        alignIndex++;
        continue;
      }

      // current gop starts before the current gop we want to align on. so increment gop
      // index
      gopIndex++;
      byteLength -= gop.byteLength;
      nalCount -= gop.nalCount;
      duration -= gop.duration;
    }

    if (gopIndex === 0) {
      // no gops to trim
      return gops;
    }

    if (gopIndex === gops.length) {
      // all gops trimmed, skip appending all gops
      return null;
    }

    // keep the remaining gops and re-attach the aggregate metadata
    alignedGops = gops.slice(gopIndex);
    alignedGops.byteLength = byteLength;
    alignedGops.duration = duration;
    alignedGops.nalCount = nalCount;
    alignedGops.pts = alignedGops[0].pts;
    alignedGops.dts = alignedGops[0].dts;

    return alignedGops;
  };

  // trim gop list to the first gop found that has a matching pts with a gop in the list
  // of gopsToAlignWith starting from the END of the list
  this.alignGopsAtEnd_ = function (gops) {
    var alignIndex, gopIndex, align, gop, alignEndIndex, matchFound;

    // scan both lists backwards looking for a pts match
    alignIndex = gopsToAlignWith.length - 1;
    gopIndex = gops.length - 1;
    alignEndIndex = null;
    matchFound = false;

    while (alignIndex >= 0 && gopIndex >= 0) {
      align = gopsToAlignWith[alignIndex];
      gop = gops[gopIndex];

      if (align.pts === gop.pts) {
        matchFound = true;
        break;
      }

      if (align.pts > gop.pts) {
        alignIndex--;
        continue;
      }

      if (alignIndex === gopsToAlignWith.length - 1) {
        // gop.pts is greater than the last alignment candidate. If no match is found
        // by the end of this loop, we still want to append gops that come after this
        // point
        alignEndIndex = gopIndex;
      }

      gopIndex--;
    }

    if (!matchFound && alignEndIndex === null) {
      return null;
    }

    var trimIndex;

    if (matchFound) {
      trimIndex = gopIndex;
    } else {
      trimIndex = alignEndIndex;
    }

    if (trimIndex === 0) {
      return gops;
    }

    // keep the remaining gops and recompute the aggregate metadata
    var alignedGops = gops.slice(trimIndex);
    var metadata = alignedGops.reduce(function (total, gop) {
      total.byteLength += gop.byteLength;
      total.duration += gop.duration;
      total.nalCount += gop.nalCount;
      return total;
    }, { byteLength: 0, duration: 0, nalCount: 0 });

    alignedGops.byteLength = metadata.byteLength;
    alignedGops.duration = metadata.duration;
    alignedGops.nalCount = metadata.nalCount;
    alignedGops.pts = alignedGops[0].pts;
    alignedGops.dts = alignedGops[0].dts;

    return alignedGops;
  };

  // Provide the list of gops that subsequent flushes should align with
  this.alignGopsWith = function (newGopsToAlignWith) {
    gopsToAlignWith = newGopsToAlignWith;
  };
};

_VideoSegmentStream.prototype = new stream();
7380
7381 /**
7382 * A Stream that can combine multiple streams (ie. audio & video)
7383 * into a single output segment for MSE. Also supports audio-only
7384 * and video-only streams.
7385 * @param options {object} transmuxer options object
7386 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
7387 * in the source; false to adjust the first segment to start at media timeline start.
7388 */
_CoalesceStream = function CoalesceStream(options, metadataStream) {
  // Number of Tracks per output segment
  // If greater than 1, we combine multiple
  // tracks into a single segment
  this.numberOfTracks = 0;
  this.metadataStream = metadataStream;

  options = options || {};

  if (typeof options.remux !== 'undefined') {
    this.remuxTracks = !!options.remux;
  } else {
    // remux audio and video into a single segment by default
    this.remuxTracks = true;
  }

  if (typeof options.keepOriginalTimestamps === 'boolean') {
    this.keepOriginalTimestamps = options.keepOriginalTimestamps;
  } else {
    this.keepOriginalTimestamps = false;
  }

  // per-segment buffering state; reset by flush() after each emitted segment
  // NOTE(review): audioTrack is assigned in push() but not initialized here,
  // unlike videoTrack
  this.pendingTracks = [];
  this.videoTrack = null;
  this.pendingBoxes = [];
  this.pendingCaptions = [];
  this.pendingMetadata = [];
  this.pendingBytes = 0;
  this.emittedTracks = 0;

  _CoalesceStream.prototype.init.call(this);

  // Take output from multiple
  this.push = function (output) {
    // buffer incoming captions until the associated video segment
    // finishes
    if (output.text) {
      return this.pendingCaptions.push(output);
    }
    // buffer incoming id3 tags until the final flush
    if (output.frames) {
      return this.pendingMetadata.push(output);
    }

    // Add this track to the list of pending tracks and store
    // important information required for the construction of
    // the final segment
    this.pendingTracks.push(output.track);
    this.pendingBytes += output.boxes.byteLength;

    // TODO: is there an issue for this against chrome?
    // We unshift audio and push video because
    // as of Chrome 75 when switching from
    // one init segment to another if the video
    // mdat does not appear after the audio mdat
    // only audio will play for the duration of our transmux.
    if (output.track.type === 'video') {
      this.videoTrack = output.track;
      this.pendingBoxes.push(output.boxes);
    }
    if (output.track.type === 'audio') {
      this.audioTrack = output.track;
      this.pendingBoxes.unshift(output.boxes);
    }
  };
};

_CoalesceStream.prototype = new stream();
// Combine the buffered track boxes, captions, and id3 metadata into a single
// 'data' event, then emit 'caption'/'id3Frame' events and, once every track
// has been flushed, 'done'.
//
// @param {String} flushSource - name of the upstream stream that triggered
//        this flush (e.g. 'VideoSegmentStream' or 'AudioSegmentStream')
_CoalesceStream.prototype.flush = function (flushSource) {
  var offset = 0,
      event = {
        captions: [],
        captionStreams: {},
        metadata: [],
        info: {}
      },
      caption,
      id3,
      initSegment,
      timelineStartPts = 0,
      i;

  if (this.pendingTracks.length < this.numberOfTracks) {
    if (flushSource !== 'VideoSegmentStream' && flushSource !== 'AudioSegmentStream') {
      // Return because we haven't received a flush from a data-generating
      // portion of the segment (meaning that we have only received meta-data
      // or captions.)
      return;
    } else if (this.remuxTracks) {
      // Return until we have enough tracks from the pipeline to remux (if we
      // are remuxing audio and video into a single MP4)
      return;
    } else if (this.pendingTracks.length === 0) {
      // In the case where we receive a flush without any data having been
      // received we consider it an emitted track for the purposes of coalescing
      // `done` events.
      // We do this for the case where there is an audio and video track in the
      // segment but no audio data. (seen in several playlists with alternate
      // audio tracks and no audio present in the main TS segments.)
      this.emittedTracks++;

      if (this.emittedTracks >= this.numberOfTracks) {
        this.trigger('done');
        this.emittedTracks = 0;
      }
      return;
    }
  }

  // base caption/id3 timing on the video timeline when available since video
  // timing takes precedence
  if (this.videoTrack) {
    timelineStartPts = this.videoTrack.timelineStartInfo.pts;
    videoProperties.forEach(function (prop) {
      event.info[prop] = this.videoTrack[prop];
    }, this);
  } else if (this.audioTrack) {
    timelineStartPts = this.audioTrack.timelineStartInfo.pts;
    audioProperties.forEach(function (prop) {
      event.info[prop] = this.audioTrack[prop];
    }, this);
  }

  if (this.videoTrack || this.audioTrack) {
    if (this.pendingTracks.length === 1) {
      event.type = this.pendingTracks[0].type;
    } else {
      event.type = 'combined';
    }

    this.emittedTracks += this.pendingTracks.length;

    initSegment = mp4Generator.initSegment(this.pendingTracks);

    // Create a new typed array to hold the init segment
    event.initSegment = new Uint8Array(initSegment.byteLength);

    // Create an init segment containing a moov
    // and track definitions
    event.initSegment.set(initSegment);

    // Create a new typed array to hold the moof+mdats
    event.data = new Uint8Array(this.pendingBytes);

    // Append each moof+mdat (one per track) together
    for (i = 0; i < this.pendingBoxes.length; i++) {
      event.data.set(this.pendingBoxes[i], offset);
      offset += this.pendingBoxes[i].byteLength;
    }

    // Translate caption PTS times into second offsets to match the
    // video timeline for the segment, and add track info
    for (i = 0; i < this.pendingCaptions.length; i++) {
      caption = this.pendingCaptions[i];
      caption.startTime = clock.metadataTsToSeconds(caption.startPts, timelineStartPts, this.keepOriginalTimestamps);
      caption.endTime = clock.metadataTsToSeconds(caption.endPts, timelineStartPts, this.keepOriginalTimestamps);

      event.captionStreams[caption.stream] = true;
      event.captions.push(caption);
    }

    // Translate ID3 frame PTS times into second offsets to match the
    // video timeline for the segment
    for (i = 0; i < this.pendingMetadata.length; i++) {
      id3 = this.pendingMetadata[i];
      id3.cueTime = clock.metadataTsToSeconds(id3.pts, timelineStartPts, this.keepOriginalTimestamps);

      event.metadata.push(id3);
    }

    // We add this to every single emitted segment even though we only need
    // it for the first
    event.metadata.dispatchType = this.metadataStream.dispatchType;

    // Reset stream state
    this.pendingTracks.length = 0;
    this.videoTrack = null;
    this.pendingBoxes.length = 0;
    this.pendingCaptions.length = 0;
    this.pendingBytes = 0;
    this.pendingMetadata.length = 0;

    // Emit the built segment
    // We include captions and ID3 tags for backwards compatibility,
    // ideally we should send only video and audio in the data event
    this.trigger('data', event);
    // Emit each caption to the outside world
    // Ideally, this would happen immediately on parsing captions,
    // but we need to ensure that video data is sent back first
    // so that caption timing can be adjusted to match video timing
    for (i = 0; i < event.captions.length; i++) {
      caption = event.captions[i];

      this.trigger('caption', caption);
    }
    // Emit each id3 tag to the outside world
    // Ideally, this would happen immediately on parsing the tag,
    // but we need to ensure that video data is sent back first
    // so that ID3 frame timing can be adjusted to match video timing
    for (i = 0; i < event.metadata.length; i++) {
      id3 = event.metadata[i];

      this.trigger('id3Frame', id3);
    }
  }

  // Only emit `done` if all tracks have been flushed and emitted
  if (this.emittedTracks >= this.numberOfTracks) {
    this.trigger('done');
    this.emittedTracks = 0;
  }
};
7598
/**
 * Enable or disable combining audio and video into one output segment.
 *
 * @param {boolean} shouldRemux - true to remux tracks together, false to
 *        emit each track as its own segment
 */
_CoalesceStream.prototype.setRemux = function (shouldRemux) {
  this.remuxTracks = shouldRemux;
};
7602 /**
7603 * A Stream that expects MP2T binary data as input and produces
7604 * corresponding media segments, suitable for use with Media Source
7605 * Extension (MSE) implementations that support the ISO BMFF byte
7606 * stream format, like Chrome.
7607 */
7608 _Transmuxer = function Transmuxer(options) {
7609 var self = this,
7610 hasFlushed = true,
7611 videoTrack,
7612 audioTrack;
7613
7614 _Transmuxer.prototype.init.call(this);
7615
7616 options = options || {};
7617 this.baseMediaDecodeTime = options.baseMediaDecodeTime || 0;
7618 this.transmuxPipeline_ = {};
7619
7620 this.setupAacPipeline = function () {
7621 var pipeline = {};
7622 this.transmuxPipeline_ = pipeline;
7623
7624 pipeline.type = 'aac';
7625 pipeline.metadataStream = new m2ts_1.MetadataStream();
7626
7627 // set up the parsing pipeline
7628 pipeline.aacStream = new aac();
7629 pipeline.audioTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('audio');
7630 pipeline.timedMetadataTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('timed-metadata');
7631 pipeline.adtsStream = new adts();
7632 pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
7633 pipeline.headOfPipeline = pipeline.aacStream;
7634
7635 pipeline.aacStream.pipe(pipeline.audioTimestampRolloverStream).pipe(pipeline.adtsStream);
7636 pipeline.aacStream.pipe(pipeline.timedMetadataTimestampRolloverStream).pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream);
7637
7638 pipeline.metadataStream.on('timestamp', function (frame) {
7639 pipeline.aacStream.setTimestamp(frame.timeStamp);
7640 });
7641
7642 pipeline.aacStream.on('data', function (data) {
7643 if (data.type === 'timed-metadata' && !pipeline.audioSegmentStream) {
7644 audioTrack = audioTrack || {
7645 timelineStartInfo: {
7646 baseMediaDecodeTime: self.baseMediaDecodeTime
7647 },
7648 codec: 'adts',
7649 type: 'audio'
7650 };
7651 // hook up the audio segment stream to the first track with aac data
7652 pipeline.coalesceStream.numberOfTracks++;
7653 pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
7654
7655 pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo'));
7656
7657 // Set up the final part of the audio pipeline
7658 pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream);
7659 }
7660
7661 // emit pmt info
7662 self.trigger('trackinfo', {
7663 hasAudio: !!audioTrack,
7664 hasVideo: !!videoTrack
7665 });
7666 });
7667
7668 // Re-emit any data coming from the coalesce stream to the outside world
7669 pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
7670 // Let the consumer know we have finished flushing the entire pipeline
7671 pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
7672 };
7673
7674 this.setupTsPipeline = function () {
7675 var pipeline = {};
7676 this.transmuxPipeline_ = pipeline;
7677
7678 pipeline.type = 'ts';
7679 pipeline.metadataStream = new m2ts_1.MetadataStream();
7680
7681 // set up the parsing pipeline
7682 pipeline.packetStream = new m2ts_1.TransportPacketStream();
7683 pipeline.parseStream = new m2ts_1.TransportParseStream();
7684 pipeline.elementaryStream = new m2ts_1.ElementaryStream();
7685 pipeline.timestampRolloverStream = new m2ts_1.TimestampRolloverStream();
7686 pipeline.adtsStream = new adts();
7687 pipeline.h264Stream = new H264Stream();
7688 pipeline.captionStream = new m2ts_1.CaptionStream();
7689 pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
7690 pipeline.headOfPipeline = pipeline.packetStream;
7691
7692 // disassemble MPEG2-TS packets into elementary streams
7693 pipeline.packetStream.pipe(pipeline.parseStream).pipe(pipeline.elementaryStream).pipe(pipeline.timestampRolloverStream);
7694
7695 // !!THIS ORDER IS IMPORTANT!!
7696 // demux the streams
7697 pipeline.timestampRolloverStream.pipe(pipeline.h264Stream);
7698
7699 pipeline.timestampRolloverStream.pipe(pipeline.adtsStream);
7700
7701 pipeline.timestampRolloverStream.pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream);
7702
7703 // Hook up CEA-608/708 caption stream
7704 pipeline.h264Stream.pipe(pipeline.captionStream).pipe(pipeline.coalesceStream);
7705
// Lazily build out the rest of the transmux pipeline once the elementary
// stream has parsed track metadata (the PMT): create one video and/or one
// audio segment stream for the first track of each type seen.
pipeline.elementaryStream.on('data', function (data) {
  var i;

  // when keepOriginalTimestamps is set, timestamps are left untouched;
  // otherwise tracks are anchored at the transmuxer's baseMediaDecodeTime
  var baseMediaDecodeTime = !options.keepOriginalTimestamps ? self.baseMediaDecodeTime : 0;

  if (data.type === 'metadata') {
    i = data.tracks.length;

    // scan the tracks listed in the metadata, keeping only the first
    // video track and the first audio track encountered
    while (i--) {
      if (!videoTrack && data.tracks[i].type === 'video') {
        videoTrack = data.tracks[i];
        videoTrack.timelineStartInfo.baseMediaDecodeTime = baseMediaDecodeTime;
      } else if (!audioTrack && data.tracks[i].type === 'audio') {
        audioTrack = data.tracks[i];
        audioTrack.timelineStartInfo.baseMediaDecodeTime = baseMediaDecodeTime;
      }
    }

    // hook up the video segment stream to the first track with h264 data
    if (videoTrack && !pipeline.videoSegmentStream) {
      pipeline.coalesceStream.numberOfTracks++;
      pipeline.videoSegmentStream = new _VideoSegmentStream(videoTrack, options);

      pipeline.videoSegmentStream.on('timelineStartInfo', function (timelineStartInfo) {
        // When video emits timelineStartInfo data after a flush, we forward that
        // info to the AudioSegmentStream, if it exists, because video timeline
        // data takes precedence. Do not do this if keepOriginalTimestamps is set,
        // because this is a particularly subtle form of timestamp alteration.
        if (audioTrack && !options.keepOriginalTimestamps) {
          audioTrack.timelineStartInfo = timelineStartInfo;
          // On the first segment we trim AAC frames that exist before the
          // very earliest DTS we have seen in video because Chrome will
          // interpret any video track with a baseMediaDecodeTime that is
          // non-zero as a gap.
          pipeline.audioSegmentStream.setEarliestDts(timelineStartInfo.dts - self.baseMediaDecodeTime);
        }
      });

      // re-emit timing/gop information to consumers of the transmuxer
      pipeline.videoSegmentStream.on('processedGopsInfo', self.trigger.bind(self, 'gopInfo'));
      pipeline.videoSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'videoSegmentTimingInfo'));

      pipeline.videoSegmentStream.on('baseMediaDecodeTime', function (baseMediaDecodeTime) {
        // keep the audio segment stream in step with the video timeline
        if (audioTrack) {
          pipeline.audioSegmentStream.setVideoBaseMediaDecodeTime(baseMediaDecodeTime);
        }
      });

      pipeline.videoSegmentStream.on('timingInfo', self.trigger.bind(self, 'videoTimingInfo'));

      // Set up the final part of the video pipeline
      pipeline.h264Stream.pipe(pipeline.videoSegmentStream).pipe(pipeline.coalesceStream);
    }

    if (audioTrack && !pipeline.audioSegmentStream) {
      // hook up the audio segment stream to the first track with aac data
      pipeline.coalesceStream.numberOfTracks++;
      pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);

      pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo'));

      // Set up the final part of the audio pipeline
      pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream);
    }

    // emit pmt info
    self.trigger('trackinfo', {
      hasAudio: !!audioTrack,
      hasVideo: !!videoTrack
    });
  }
});
7778
// Re-emit any data coming from the coalesce stream to the outside world
pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
pipeline.coalesceStream.on('id3Frame', function (id3Frame) {
  // stamp in-band ID3 frames with the dispatch type taken from the
  // metadata stream before re-emitting them
  id3Frame.dispatchType = pipeline.metadataStream.dispatchType;

  self.trigger('id3Frame', id3Frame);
});
pipeline.coalesceStream.on('caption', this.trigger.bind(this, 'caption'));
// Let the consumer know we have finished flushing the entire pipeline
pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
7789 };
7790
// Reset cached timing state on both tracks so the next segment is remuxed
// relative to the supplied baseMediaDecodeTime (90kHz clock ticks).
this.setBaseMediaDecodeTime = function (baseMediaDecodeTime) {
  var pipeline = this.transmuxPipeline_;

  this.baseMediaDecodeTime = baseMediaDecodeTime;

  if (audioTrack) {
    // forget previously observed audio timestamps; they belong to the
    // old timeline
    audioTrack.timelineStartInfo.dts = undefined;
    audioTrack.timelineStartInfo.pts = undefined;
    trackDecodeInfo.clearDtsInfo(audioTrack);
    if (pipeline.audioTimestampRolloverStream) {
      // signal a timestamp discontinuity to the rollover handler
      pipeline.audioTimestampRolloverStream.discontinuity();
    }
  }
  if (videoTrack) {
    if (pipeline.videoSegmentStream) {
      // drop cached gops; they were aligned to the old timeline
      pipeline.videoSegmentStream.gopCache_ = [];
    }
    videoTrack.timelineStartInfo.dts = undefined;
    videoTrack.timelineStartInfo.pts = undefined;
    trackDecodeInfo.clearDtsInfo(videoTrack);
    pipeline.captionStream.reset();
  }

  if (pipeline.timestampRolloverStream) {
    pipeline.timestampRolloverStream.discontinuity();
  }
};
7819
// Forward the audio append start time (90kHz clock ticks) to the audio
// segment stream, which uses it to trim overlapping audio frames.
this.setAudioAppendStart = function (timestamp) {
  if (audioTrack) {
    this.transmuxPipeline_.audioSegmentStream.setAudioAppendStart(timestamp);
  }
};

// Enable or disable remuxing audio and video into a single output
// segment; also updates the live coalesce stream if one exists.
this.setRemux = function (val) {
  var pipeline = this.transmuxPipeline_;

  options.remux = val;

  if (pipeline && pipeline.coalesceStream) {
    pipeline.coalesceStream.setRemux(val);
  }
};

// Provide a list of gops the video segment stream should try to align
// newly transmuxed gops with (used for overlapping-append workflows).
this.alignGopsWith = function (gopsToAlignWith) {
  if (videoTrack && this.transmuxPipeline_.videoSegmentStream) {
    this.transmuxPipeline_.videoSegmentStream.alignGopsWith(gopsToAlignWith);
  }
};
7841
// feed incoming data to the front of the parsing pipeline
this.push = function (data) {
  // After a flush, re-detect the container format: the next segment may
  // switch between raw AAC and MPEG2-TS, which require different pipelines.
  if (hasFlushed) {
    var isAac = isLikelyAacData$1(data);

    if (isAac && this.transmuxPipeline_.type !== 'aac') {
      this.setupAacPipeline();
    } else if (!isAac && this.transmuxPipeline_.type !== 'ts') {
      this.setupTsPipeline();
    }
    hasFlushed = false;
  }
  this.transmuxPipeline_.headOfPipeline.push(data);
};

// flush any buffered data
this.flush = function () {
  // remember that a flush happened so the next push re-detects the
  // container format
  hasFlushed = true;
  // Start at the top of the pipeline and flush all pending work
  this.transmuxPipeline_.headOfPipeline.flush();
};

// Signal the end of the current timeline to the whole pipeline.
this.endTimeline = function () {
  this.transmuxPipeline_.headOfPipeline.endTimeline();
};

// Discard buffered pipeline state without emitting anything.
this.reset = function () {
  if (this.transmuxPipeline_.headOfPipeline) {
    this.transmuxPipeline_.headOfPipeline.reset();
  }
};

// Caption data has to be reset when seeking outside buffered range
this.resetCaptions = function () {
  if (this.transmuxPipeline_.captionStream) {
    this.transmuxPipeline_.captionStream.reset();
  }
};
7880 };
7881 _Transmuxer.prototype = new stream();
7882
// Public surface of the transmuxer module; segment streams and the
// property lists are exposed alongside the Transmuxer itself.
var transmuxer = {
  Transmuxer: _Transmuxer,
  VideoSegmentStream: _VideoSegmentStream,
  AudioSegmentStream: _AudioSegmentStream,
  AUDIO_PROPERTIES: audioProperties,
  VIDEO_PROPERTIES: videoProperties,
  // exported for testing
  generateVideoSegmentTimingInfo: generateVideoSegmentTimingInfo
};
7892
// Babel helper: reject plain function invocation of a class constructor.
var classCallCheck = function classCallCheck(instance, Constructor) {
  var calledWithNew = instance instanceof Constructor;

  if (calledWithNew === false) {
    throw new TypeError("Cannot call a class as a function");
  }
};
7898
// Babel helper: install prototype members and static members on a
// constructor from arrays of property descriptors.
var createClass = function () {
  // Apply each descriptor to `target`, normalizing the enumerable /
  // configurable / writable flags the way compiled classes expect.
  function defineProperties(target, props) {
    for (var idx = 0; idx < props.length; idx++) {
      var desc = props[idx];

      desc.enumerable = desc.enumerable || false;
      desc.configurable = true;
      if ("value" in desc) {
        desc.writable = true;
      }
      Object.defineProperty(target, desc.key, desc);
    }
  }

  return function (Constructor, protoProps, staticProps) {
    if (protoProps) {
      defineProperties(Constructor.prototype, protoProps);
    }
    if (staticProps) {
      defineProperties(Constructor, staticProps);
    }
    return Constructor;
  };
}();
7916
7917 /**
7918 * @file transmuxer-worker.js
7919 */
7920
7921 /**
7922 * Re-emits transmuxer events by converting them into messages to the
7923 * world outside the worker.
7924 *
7925 * @param {Object} transmuxer the transmuxer to wire events on
7926 * @private
7927 */
/**
 * Re-emits transmuxer events by converting them into messages to the
 * world outside the worker.
 *
 * @param {Object} self the worker global scope used to post messages
 * @param {Object} transmuxer$$1 the transmuxer to wire events on
 * @private
 */
var wireTransmuxerEvents = function wireTransmuxerEvents(self, transmuxer$$1) {
  transmuxer$$1.on('data', function (segment) {
    // Transfer ownership of the underlying ArrayBuffers instead of
    // copying them to save memory. ArrayBuffers are transferable but
    // generic TypedArrays are not.
    // @link https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Using_web_workers#Passing_data_by_transferring_ownership_(transferable_objects)
    var init = segment.initSegment;
    var payload = segment.data;

    segment.initSegment = {
      data: init.buffer,
      byteOffset: init.byteOffset,
      byteLength: init.byteLength
    };
    segment.data = payload.buffer;

    self.postMessage({
      action: 'data',
      segment: segment,
      byteOffset: payload.byteOffset,
      byteLength: payload.byteLength
    }, [segment.data]);
  });

  if (transmuxer$$1.captionStream) {
    transmuxer$$1.captionStream.on('data', function (caption) {
      self.postMessage({
        action: 'caption',
        data: caption
      });
    });
  }

  transmuxer$$1.on('done', function (data) {
    self.postMessage({ action: 'done' });
  });

  transmuxer$$1.on('gopInfo', function (gopInfo) {
    self.postMessage({
      action: 'gopInfo',
      gopInfo: gopInfo
    });
  });

  transmuxer$$1.on('videoSegmentTimingInfo', function (videoSegmentTimingInfo) {
    self.postMessage({
      action: 'videoSegmentTimingInfo',
      videoSegmentTimingInfo: videoSegmentTimingInfo
    });
  });
};
7980
7981 /**
7982 * All incoming messages route through this hash. If no function exists
7983 * to handle an incoming message, then we ignore the message.
7984 *
7985 * @class MessageHandlers
7986 * @param {Object} options the options to initialize with
7987 */
7988
var MessageHandlers = function () {
  function MessageHandlers(self, options) {
    classCallCheck(this, MessageHandlers);

    // options forwarded to the transmuxer; self is the worker scope used
    // by wireTransmuxerEvents to post results back to the main thread
    this.options = options || {};
    this.self = self;
    this.init();
  }

  /**
   * initialize our web worker and wire all the events.
   */

  createClass(MessageHandlers, [{
    key: 'init',
    value: function init() {
      // dispose any previous transmuxer before replacing it
      if (this.transmuxer) {
        this.transmuxer.dispose();
      }
      this.transmuxer = new transmuxer.Transmuxer(this.options);
      wireTransmuxerEvents(this.self, this.transmuxer);
    }

    /**
     * Adds data (a ts segment) to the start of the transmuxer pipeline for
     * processing.
     *
     * @param {ArrayBuffer} data data to push into the muxer
     */

  }, {
    key: 'push',
    value: function push(data) {
      // Cast array buffer to correct type for transmuxer
      var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);

      this.transmuxer.push(segment);
    }

    /**
     * Recreate the transmuxer so that the next segment added via `push`
     * starts with a fresh transmuxer.
     */

  }, {
    key: 'reset',
    value: function reset() {
      this.init();
    }

    /**
     * Set the value that will be used as the `baseMediaDecodeTime` time for the
     * next segment pushed in. Subsequent segments will have their `baseMediaDecodeTime`
     * set relative to the first based on the PTS values.
     *
     * @param {Object} data used to set the timestamp offset in the muxer
     */

  }, {
    key: 'setTimestampOffset',
    value: function setTimestampOffset(data) {
      var timestampOffset = data.timestampOffset || 0;

      // convert seconds to 90kHz MPEG-TS clock ticks
      this.transmuxer.setBaseMediaDecodeTime(Math.round(timestampOffset * 90000));
    }
  }, {
    key: 'setAudioAppendStart',
    value: function setAudioAppendStart(data) {
      // convert seconds to 90kHz MPEG-TS clock ticks
      this.transmuxer.setAudioAppendStart(Math.ceil(data.appendStart * 90000));
    }

    /**
     * Forces the pipeline to finish processing the last segment and emit its
     * results.
     *
     * @param {Object} data event data, not really used
     */

  }, {
    key: 'flush',
    value: function flush(data) {
      this.transmuxer.flush();
    }
  }, {
    key: 'resetCaptions',
    value: function resetCaptions() {
      this.transmuxer.resetCaptions();
    }
  }, {
    key: 'alignGopsWith',
    value: function alignGopsWith(data) {
      // copy the gop list so the worker owns its own array
      this.transmuxer.alignGopsWith(data.gopsToAlignWith.slice());
    }
  }]);
  return MessageHandlers;
}();
8085
8086 /**
 * Our web worker interface so that things can talk to mux.js
8088 * that will be running in a web worker. the scope is passed to this by
8089 * webworkify.
8090 *
8091 * @param {Object} self the scope for the web worker
8092 */
8093
var TransmuxerWorker = function TransmuxerWorker(self) {
  self.onmessage = function (event) {
    // an explicit 'init' with options replaces any existing handler set
    if (event.data.action === 'init' && event.data.options) {
      this.messageHandlers = new MessageHandlers(self, event.data.options);
      return;
    }

    // lazily create handlers with default options if 'init' never arrived
    if (!this.messageHandlers) {
      this.messageHandlers = new MessageHandlers(self);
    }

    // route any other recognized action to its handler; messages with an
    // unknown action are silently ignored
    if (event.data && event.data.action && event.data.action !== 'init') {
      if (this.messageHandlers[event.data.action]) {
        this.messageHandlers[event.data.action](event.data);
      }
    }
  };
};
8112
8113 var transmuxerWorker = new TransmuxerWorker(self);
8114
8115 return transmuxerWorker;
8116 }();
8117});
8118
8119/**
8120 * @file - codecs.js - Handles tasks regarding codec strings such as translating them to
8121 * codec strings, or translating codec strings into objects that can be examined.
8122 */
8123
// Default codec parameters if none were provided for video and/or audio
var defaultCodecs = {
  videoCodec: 'avc1',
  // default H.264 object type indicator suffix (yields "avc1.4d400d")
  videoObjectTypeIndicator: '.4d400d',
  // AAC-LC
  audioProfile: '2'
};
8131
8132/**
8133 * Replace the old apple-style `avc1.<dd>.<dd>` codec string with the standard
8134 * `avc1.<hhhhhh>`
8135 *
8136 * @param {Array} codecs an array of codec strings to fix
8137 * @return {Array} the translated codec array
8138 * @private
8139 */
/**
 * Replace the old apple-style `avc1.<dd>.<dd>` codec string with the standard
 * `avc1.<hhhhhh>`
 *
 * @param {Array} codecs an array of codec strings to fix
 * @return {Array} the translated codec array
 * @private
 */
var translateLegacyCodecs = function translateLegacyCodecs(codecs) {
  var legacyAvcPattern = /avc1\.(\d+)\.(\d+)/i;

  // zero-pad a decimal string into a two-digit hex byte
  var toHexByte = function (decimalString) {
    return ('00' + Number(decimalString).toString(16)).slice(-2);
  };

  return codecs.map(function (codec) {
    return codec.replace(legacyAvcPattern, function (orig, profile, avcLevel) {
      return 'avc1.' + toHexByte(profile) + '00' + toHexByte(avcLevel);
    });
  });
};
8150
8151/**
8152 * Parses a codec string to retrieve the number of codecs specified,
8153 * the video codec and object type indicator, and the audio profile.
8154 */
8155
/**
 * Parses a codec string to retrieve the number of codecs specified,
 * the video codec and object type indicator, and the audio profile.
 *
 * @return {Object} codecCount, plus videoCodec/videoObjectTypeIndicator
 *                  and audioProfile when present in the string
 */
var parseCodecs = function parseCodecs() {
  var codecs = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : '';

  var result = { codecCount: 0 };

  // every comma-separated field counts as one codec
  result.codecCount = codecs.split(',').length || 2;

  // pull out the video codec name and its object type indicator
  var videoMatch = /(^|\s|,)+(avc[13])([^ ,]*)/i.exec(codecs);

  if (videoMatch) {
    result.videoCodec = videoMatch[2];
    result.videoObjectTypeIndicator = videoMatch[3];
  }

  // the audio profile is the final field of the mp4a codec string
  var audioMatch = /(^|\s|,)+mp4a.[0-9A-Fa-f]+\.([0-9A-Fa-f]+)/i.exec(codecs);

  result.audioProfile = audioMatch && audioMatch[2];

  return result;
};
8180
8181/**
8182 * Replace codecs in the codec string with the old apple-style `avc1.<dd>.<dd>` to the
8183 * standard `avc1.<hhhhhh>`.
8184 *
8185 * @param codecString {String} the codec string
8186 * @return {String} the codec string with old apple-style codecs replaced
8187 *
8188 * @private
8189 */
/**
 * Replace codecs in the codec string with the old apple-style `avc1.<dd>.<dd>` to the
 * standard `avc1.<hhhhhh>`.
 *
 * @param codecString {String} the codec string
 * @return {String} the codec string with old apple-style codecs replaced
 *
 * @private
 */
var mapLegacyAvcCodecs = function mapLegacyAvcCodecs(codecString) {
  return codecString.replace(/avc1\.(\d+)\.(\d+)/i, function (match) {
    var translated = translateLegacyCodecs([match]);

    return translated[0];
  });
};
8195
8196/**
8197 * Build a media mime-type string from a set of parameters
8198 * @param {String} type either 'audio' or 'video'
8199 * @param {String} container either 'mp2t' or 'mp4'
8200 * @param {Array} codecs an array of codec strings to add
8201 * @return {String} a valid media mime-type
8202 */
/**
 * Build a media mime-type string from a set of parameters
 * @param {String} type either 'audio' or 'video'
 * @param {String} container either 'mp2t' or 'mp4'
 * @param {Array} codecs an array of codec strings to add
 * @return {String} a valid media mime-type
 */
var makeMimeTypeString = function makeMimeTypeString(type, container, codecs) {
  // Drop falsey codec entries so Array#join never produces spurious commas.
  var presentCodecs = codecs.filter(function (codec) {
    return Boolean(codec);
  });

  return type + '/' + container + '; codecs="' + presentCodecs.join(', ') + '"';
};
8211
8212/**
8213 * Returns the type container based on information in the playlist
8214 * @param {Playlist} media the current media playlist
8215 * @return {String} a valid media container type
8216 */
/**
 * Returns the type container based on information in the playlist
 * @param {Playlist} media the current media playlist
 * @return {String} a valid media container type
 */
var getContainerType = function getContainerType(media) {
  var segments = media.segments;

  // An initialization segment (`map`) means the media playlist is an
  // iframe playlist or is using the mp4 container. Iframe playlists are
  // not currently supported, so assume this is signalling mp4 fragments.
  var hasInitSegment = Boolean(segments && segments.length && segments[0].map);

  return hasInitSegment ? 'mp4' : 'mp2t';
};
8227
8228/**
8229 * Returns a set of codec strings parsed from the playlist or the default
8230 * codec strings if no codecs were specified in the playlist
8231 * @param {Playlist} media the current media playlist
8232 * @return {Object} an object with the video and audio codecs
8233 */
/**
 * Returns a set of codec strings parsed from the playlist or the default
 * codec strings if no codecs were specified in the playlist
 * @param {Playlist} media the current media playlist
 * @return {Object} an object with the video and audio codecs
 */
var getCodecs = function getCodecs(media) {
  var mediaAttributes = media.attributes || {};

  // explicitly specified codecs take precedence over the defaults
  if (mediaAttributes.CODECS) {
    return parseCodecs(mediaAttributes.CODECS);
  }

  return defaultCodecs;
};
8244
/**
 * Look up the audio profile of the default rendition in an audio media
 * group, or null when the group (or a default with playlists) is absent.
 *
 * @param {Object} master the parsed master playlist object
 * @param {String} audioGroupId the AUDIO group-id to inspect
 * @return {String|null} the parsed audio profile, if one can be found
 */
var audioProfileFromDefault = function audioProfileFromDefault(master, audioGroupId) {
  var audioGroups = master.mediaGroups.AUDIO;

  if (!audioGroups || !audioGroupId) {
    return null;
  }

  var audioGroup = audioGroups[audioGroupId];

  if (!audioGroup) {
    return null;
  }

  for (var name in audioGroup) {
    var audioType = audioGroup[name];

    if (audioType.default && audioType.playlists) {
      // codec should be the same for all playlists within the audio type,
      // so the first playlist is representative
      return parseCodecs(audioType.playlists[0].attributes.CODECS).audioProfile;
    }
  }

  return null;
};
8267
8268/**
8269 * Calculates the MIME type strings for a working configuration of
8270 * SourceBuffers to play variant streams in a master playlist. If
8271 * there is no possible working configuration, an empty array will be
8272 * returned.
8273 *
8274 * @param master {Object} the m3u8 object for the master playlist
8275 * @param media {Object} the m3u8 object for the variant playlist
8276 * @return {Array} the MIME type strings. If the array has more than
8277 * one entry, the first element should be applied to the video
8278 * SourceBuffer and the second to the audio SourceBuffer.
8279 *
8280 * @private
8281 */
/**
 * Calculates the MIME type strings for a working configuration of
 * SourceBuffers to play variant streams in a master playlist. If
 * there is no possible working configuration, an empty array will be
 * returned.
 *
 * @param master {Object} the m3u8 object for the master playlist
 * @param media {Object} the m3u8 object for the variant playlist
 * @return {Array} the MIME type strings. If the array has more than
 * one entry, the first element should be applied to the video
 * SourceBuffer and the second to the audio SourceBuffer.
 *
 * @private
 */
var mimeTypesForPlaylist = function mimeTypesForPlaylist(master, media) {
  // Guard first: previously this check ran after `media` had already been
  // dereferenced, so a missing media playlist threw instead of returning [].
  if (!media) {
    // Not enough information
    return [];
  }

  var containerType = getContainerType(media);
  var codecInfo = getCodecs(media);
  var mediaAttributes = media.attributes || {};
  // Default condition for a traditional HLS (no demuxed audio/video)
  var isMuxed = true;
  var isMaat = false;

  if (master.mediaGroups.AUDIO && mediaAttributes.AUDIO) {
    var audioGroup = master.mediaGroups.AUDIO[mediaAttributes.AUDIO];

    // Handle the case where we are in a multiple-audio track scenario
    if (audioGroup) {
      isMaat = true;
      // Start with the everything demuxed then...
      isMuxed = false;
      // ...check to see if any audio group tracks are muxed (ie. lacking a uri)
      for (var groupId in audioGroup) {
        // either a uri is present (in the case of HLS and an external playlist), or
        // playlists is present (in the case of DASH where we don't have external audio
        // playlists)
        if (!audioGroup[groupId].uri && !audioGroup[groupId].playlists) {
          isMuxed = true;
          break;
        }
      }
    }
  }

  // HLS with multiple-audio tracks must always get an audio codec.
  // Put another way, there is no way to have a video-only multiple-audio HLS!
  if (isMaat && !codecInfo.audioProfile) {
    if (!isMuxed) {
      // It is possible for codecs to be specified on the audio media group playlist but
      // not on the rendition playlist. This is mostly the case for DASH, where audio and
      // video are always separate (and separately specified).
      codecInfo.audioProfile = audioProfileFromDefault(master, mediaAttributes.AUDIO);
    }

    if (!codecInfo.audioProfile) {
      videojs.log.warn('Multiple audio tracks present but no audio codec string is specified. ' + 'Attempting to use the default audio codec (mp4a.40.2)');
      codecInfo.audioProfile = defaultCodecs.audioProfile;
    }
  }

  // Generate the final codec strings from the codec object generated above
  var codecStrings = {};

  if (codecInfo.videoCodec) {
    codecStrings.video = '' + codecInfo.videoCodec + codecInfo.videoObjectTypeIndicator;
  }

  if (codecInfo.audioProfile) {
    codecStrings.audio = 'mp4a.40.' + codecInfo.audioProfile;
  }

  // Finally, make and return an array with proper mime-types depending on
  // the configuration
  var justAudio = makeMimeTypeString('audio', containerType, [codecStrings.audio]);
  var justVideo = makeMimeTypeString('video', containerType, [codecStrings.video]);
  var bothVideoAudio = makeMimeTypeString('video', containerType, [codecStrings.video, codecStrings.audio]);

  if (isMaat) {
    if (!isMuxed && codecStrings.video) {
      return [justVideo, justAudio];
    }

    if (!isMuxed && !codecStrings.video) {
      // There is no muxed content and no video codec string, so this is an audio only
      // stream with alternate audio.
      return [justAudio, justAudio];
    }

    // There exists the possibility that this will return a `video/container`
    // mime-type for the first entry in the array even when there is only audio.
    // This doesn't appear to be a problem and simplifies the code.
    return [bothVideoAudio, justAudio];
  }

  // If there is no video codec at all, always just return a single
  // audio/<container> mime-type
  if (!codecStrings.video) {
    return [justAudio];
  }

  // When not using separate audio media groups, audio and video is
  // *always* muxed
  return [bothVideoAudio];
};
8376
8377/**
8378 * Parse a content type header into a type and parameters
8379 * object
8380 *
8381 * @param {String} type the content type header
8382 * @return {Object} the parsed content-type
8383 * @private
8384 */
/**
 * Parse a content type header into a type and parameters
 * object
 *
 * @param {String} type the content type header
 * @return {Object} the parsed content-type
 * @private
 */
var parseContentType = function parseContentType(type) {
  var parsed = { type: '', parameters: {} };
  var pieces = type.trim().split(';');

  // the first piece is always the bare content-type
  parsed.type = pieces.shift().trim();

  pieces.forEach(function (piece) {
    var pair = piece.trim().split('=');

    if (pair.length <= 1) {
      // not a key=value parameter; skip it
      return;
    }

    var key = pair[0].replace(/"/g, '').trim();
    var value = pair[1].replace(/"/g, '').trim();

    parsed.parameters[key] = value;
  });

  return parsed;
};
8404
8405/**
8406 * Check if a codec string refers to an audio codec.
8407 *
8408 * @param {String} codec codec string to check
8409 * @return {Boolean} if this is an audio codec
8410 * @private
8411 */
/**
 * Check if a codec string refers to an audio codec.
 *
 * @param {String} codec codec string to check
 * @return {Boolean} if this is an audio codec
 * @private
 */
var isAudioCodec = function isAudioCodec(codec) {
  var audioPattern = /mp4a\.\d+.\d+/i;

  return audioPattern.test(codec);
};
8416
8417/**
8418 * Check if a codec string refers to a video codec.
8419 *
8420 * @param {String} codec codec string to check
8421 * @return {Boolean} if this is a video codec
8422 * @private
8423 */
/**
 * Check if a codec string refers to a video codec.
 *
 * @param {String} codec codec string to check
 * @return {Boolean} if this is a video codec
 * @private
 */
var isVideoCodec = function isVideoCodec(codec) {
  var videoPattern = /avc1\.[\da-f]+/i;

  return videoPattern.test(codec);
};
8428
8429/**
8430 * Returns a list of gops in the buffer that have a pts value of 3 seconds or more in
8431 * front of current time.
8432 *
8433 * @param {Array} buffer
8434 * The current buffer of gop information
8435 * @param {Number} currentTime
8436 * The current time
8437 * @param {Double} mapping
8438 * Offset to map display time to stream presentation time
8439 * @return {Array}
8440 * List of gops considered safe to append over
8441 */
/**
 * Returns a list of gops in the buffer that have a pts value of 3 seconds or more in
 * front of current time.
 *
 * @param {Array} buffer
 *        The current buffer of gop information
 * @param {Number} currentTime
 *        The current time
 * @param {Double} mapping
 *        Offset to map display time to stream presentation time
 * @return {Array}
 *         List of gops considered safe to append over
 */
var gopsSafeToAlignWith = function gopsSafeToAlignWith(buffer, currentTime, mapping) {
  if (typeof currentTime === 'undefined' || currentTime === null || !buffer.length) {
    return [];
  }

  // pts value for current time + 3 seconds to give a bit more wiggle room
  var thresholdPts = Math.ceil((currentTime - mapping + 3) * 90000);

  var firstSafeIndex = 0;

  // skip every gop at or before the threshold
  while (firstSafeIndex < buffer.length && buffer[firstSafeIndex].pts <= thresholdPts) {
    firstSafeIndex++;
  }

  return buffer.slice(firstSafeIndex);
};
8460
8461/**
8462 * Appends gop information (timing and byteLength) received by the transmuxer for the
8463 * gops appended in the last call to appendBuffer
8464 *
8465 * @param {Array} buffer
8466 * The current buffer of gop information
8467 * @param {Array} gops
8468 * List of new gop information
8469 * @param {boolean} replace
8470 * If true, replace the buffer with the new gop information. If false, append the
8471 * new gop information to the buffer in the right location of time.
8472 * @return {Array}
8473 * Updated list of gop information
8474 */
/**
 * Appends gop information (timing and byteLength) received by the transmuxer for the
 * gops appended in the last call to appendBuffer
 *
 * @param {Array} buffer
 *        The current buffer of gop information
 * @param {Array} gops
 *        List of new gop information
 * @param {boolean} replace
 *        If true, replace the buffer with the new gop information. If false, append the
 *        new gop information to the buffer in the right location of time.
 * @return {Array}
 *         Updated list of gop information
 */
var updateGopBuffer = function updateGopBuffer(buffer, gops, replace) {
  if (!gops.length) {
    return buffer;
  }

  if (replace) {
    // Safe append mode: completely overwrite the gop buffer with the most
    // recently appended data so that future alignment only considers gops
    // that are both ahead of current time and in the last appended segment.
    return gops.slice();
  }

  var firstNewPts = gops[0].pts;
  var insertIndex = 0;

  // find where, in time, the new gops begin within the existing buffer
  while (insertIndex < buffer.length && buffer[insertIndex].pts < firstNewPts) {
    insertIndex++;
  }

  return buffer.slice(0, insertIndex).concat(gops);
};
8500
8501/**
8502 * Removes gop information in buffer that overlaps with provided start and end
8503 *
8504 * @param {Array} buffer
8505 * The current buffer of gop information
8506 * @param {Double} start
8507 * position to start the remove at
8508 * @param {Double} end
8509 * position to end the remove at
8510 * @param {Double} mapping
8511 * Offset to map display time to stream presentation time
8512 */
/**
 * Removes gop information in buffer that overlaps with provided start and end
 *
 * @param {Array} buffer
 *        The current buffer of gop information
 * @param {Double} start
 *        position to start the remove at
 * @param {Double} end
 *        position to end the remove at
 * @param {Double} mapping
 *        Offset to map display time to stream presentation time
 */
var removeGopBuffer = function removeGopBuffer(buffer, start, end, mapping) {
  var startPts = Math.ceil((start - mapping) * 90000);
  var endPts = Math.ceil((end - mapping) * 90000);
  var updatedBuffer = buffer.slice();

  // walk back from the tail to the last gop at or before the remove end
  var removeEnd = buffer.length;

  while (removeEnd--) {
    if (buffer[removeEnd].pts <= endPts) {
      break;
    }
  }

  if (removeEnd === -1) {
    // no removal because end of remove range is before start of buffer
    return updatedBuffer;
  }

  // walk back again to the last gop at or before the remove start
  var removeStart = removeEnd + 1;

  while (removeStart--) {
    if (buffer[removeStart].pts <= startPts) {
      break;
    }
  }

  // clamp remove range start to 0 index
  removeStart = Math.max(removeStart, 0);

  updatedBuffer.splice(removeStart, removeEnd - removeStart + 1);

  return updatedBuffer;
};
8546
/**
 * Compute the effective buffered TimeRanges for a pair of (possibly
 * missing) native source buffers. When both exist and audio is enabled,
 * the result is the intersection of their buffered ranges.
 *
 * @param {SourceBuffer|null} videoBuffer the native video buffer, if any
 * @param {SourceBuffer|null} audioBuffer the native audio buffer, if any
 * @param {Boolean} audioDisabled whether audio should be ignored
 * @return {TimeRanges} the combined buffered ranges
 */
var buffered = function buffered(videoBuffer, audioBuffer, audioDisabled) {
  // neither buffer has been created yet
  if (!videoBuffer && !audioBuffer) {
    return videojs.createTimeRange();
  }

  // only one buffer is configured
  if (!videoBuffer) {
    return audioBuffer.buffered;
  }
  if (!audioBuffer) {
    return videoBuffer.buffered;
  }

  // both buffers are configured, but audio is ignored
  if (audioDisabled) {
    return videoBuffer.buffered;
  }

  // both buffers are empty
  if (videoBuffer.buffered.length === 0 && audioBuffer.buffered.length === 0) {
    return videojs.createTimeRange();
  }

  // Handle the case where we have both buffers and create an
  // intersection of the two
  var videoBuffered = videoBuffer.buffered;
  var audioBuffered = audioBuffer.buffered;
  var extents = [];

  // A) Gather up all start and end times
  for (var v = videoBuffered.length - 1; v >= 0; v--) {
    extents.push({ time: videoBuffered.start(v), type: 'start' });
    extents.push({ time: videoBuffered.end(v), type: 'end' });
  }
  for (var a = audioBuffered.length - 1; a >= 0; a--) {
    extents.push({ time: audioBuffered.start(a), type: 'start' });
    extents.push({ time: audioBuffered.end(a), type: 'end' });
  }

  // B) Sort them by time
  extents.sort(function (left, right) {
    return left.time - right.time;
  });

  // C) Sweep the sorted extents, incrementing arity for starts and
  // decrementing it for ends
  var ranges = [];
  var arity = 0;
  var start = null;
  var end = null;

  for (var idx = 0; idx < extents.length; idx++) {
    if (extents[idx].type === 'start') {
      arity++;

      // D) If arity is ever incremented to 2 we are entering an
      // overlapping range
      if (arity === 2) {
        start = extents[idx].time;
      }
    } else if (extents[idx].type === 'end') {
      arity--;

      // E) If arity is ever decremented to 1 we are leaving an
      // overlapping range
      if (arity === 1) {
        end = extents[idx].time;
      }
    }

    // F) Record overlapping ranges
    if (start !== null && end !== null) {
      ranges.push([start, end]);
      start = null;
      end = null;
    }
  }

  return videojs.createTimeRanges(ranges);
};
8629
8630/**
8631 * @file virtual-source-buffer.js
8632 */
8633
// number of 90kHz MPEG-TS clock ticks in one second
var ONE_SECOND_IN_TS = 90000;
8635
8636// We create a wrapper around the SourceBuffer so that we can manage the
8637// state of the `updating` property manually. We have to do this because
8638// Firefox changes `updating` to false long before triggering `updateend`
8639// events and that was causing strange problems in videojs-contrib-hls
// We create a wrapper around the SourceBuffer so that we can manage the
// state of the `updating` property manually. We have to do this because
// Firefox changes `updating` to false long before triggering `updateend`
// events and that was causing strange problems in videojs-contrib-hls
var makeWrappedSourceBuffer = function makeWrappedSourceBuffer(mediaSource, mimeType) {
  var sourceBuffer = mediaSource.addSourceBuffer(mimeType);
  var wrapper = Object.create(null);

  wrapper.updating = false;
  wrapper.realBuffer_ = sourceBuffer;

  // Mirror one member of the real buffer onto the wrapper: methods are
  // forwarded, plain properties get live getter/setter pairs. Members
  // already set on the wrapper (updating, realBuffer_) are left alone.
  var mirrorMember = function mirrorMember(key) {
    if (typeof sourceBuffer[key] === 'function') {
      wrapper[key] = function () {
        return sourceBuffer[key].apply(sourceBuffer, arguments);
      };
    } else if (typeof wrapper[key] === 'undefined') {
      Object.defineProperty(wrapper, key, {
        get: function get$$1() {
          return sourceBuffer[key];
        },
        set: function set$$1(v) {
          return sourceBuffer[key] = v;
        }
      });
    }
  };

  for (var key in sourceBuffer) {
    mirrorMember(key);
  }

  return wrapper;
};
8670
8671/**
8672 * VirtualSourceBuffers exist so that we can transmux non native formats
8673 * into a native format, but keep the same api as a native source buffer.
8674 * It creates a transmuxer, that works in its own thread (a web worker) and
8675 * that transmuxer muxes the data into a native format. VirtualSourceBuffer will
8676 * then send all of that data to the naive sourcebuffer so that it is
8677 * indestinguishable from a natively supported format.
8678 *
8679 * @param {HtmlMediaSource} mediaSource the parent mediaSource
8680 * @param {Array} codecs array of codecs that we will be dealing with
8681 * @class VirtualSourceBuffer
8682 * @extends video.js.EventTarget
8683 */
8684
8685var VirtualSourceBuffer = function (_videojs$EventTarget) {
8686 inherits(VirtualSourceBuffer, _videojs$EventTarget);
8687
function VirtualSourceBuffer(mediaSource, codecs) {
  classCallCheck(this, VirtualSourceBuffer);

  var _this = possibleConstructorReturn(this, (VirtualSourceBuffer.__proto__ || Object.getPrototypeOf(VirtualSourceBuffer)).call(this, videojs.EventTarget));

  // per-instance transmux/append state
  _this.timestampOffset_ = 0;
  _this.pendingBuffers_ = [];
  _this.bufferUpdating_ = false;

  _this.mediaSource_ = mediaSource;
  _this.codecs_ = codecs;
  _this.audioCodec_ = null;
  _this.videoCodec_ = null;
  _this.audioDisabled_ = false;
  _this.appendAudioInitSegment_ = true;
  _this.gopBuffer_ = [];
  _this.timeMapping_ = 0;
  // NOTE(review): on IE 11+ gops are aligned at the end of the buffer
  // (see alignGopsAtEnd below) — presumably an IE append workaround; confirm
  _this.safeAppend_ = videojs.browser.IE_VERSION >= 11;

  var options = {
    remux: false,
    alignGopsAtEnd: _this.safeAppend_
  };

  // classify the supplied codec strings into audio and video
  _this.codecs_.forEach(function (codec) {
    if (isAudioCodec(codec)) {
      _this.audioCodec_ = codec;
    } else if (isVideoCodec(codec)) {
      _this.videoCodec_ = codec;
    }
  });

  // append muxed segments to their respective native buffers as
  // soon as they are available
  _this.transmuxer_ = new TransmuxWorker();
  _this.transmuxer_.postMessage({ action: 'init', options: options });

  // route messages from the transmux worker to the matching handler
  _this.transmuxer_.onmessage = function (event) {
    if (event.data.action === 'data') {
      return _this.data_(event);
    }

    if (event.data.action === 'done') {
      return _this.done_(event);
    }

    if (event.data.action === 'gopInfo') {
      return _this.appendGopInfo_(event);
    }

    if (event.data.action === 'videoSegmentTimingInfo') {
      return _this.videoSegmentTimingInfo_(event.data.videoSegmentTimingInfo);
    }
  };

  // this timestampOffset is a property with the side-effect of resetting
  // baseMediaDecodeTime in the transmuxer on the setter
  Object.defineProperty(_this, 'timestampOffset', {
    get: function get$$1() {
      return this.timestampOffset_;
    },
    set: function set$$1(val) {
      if (typeof val === 'number' && val >= 0) {
        this.timestampOffset_ = val;
        this.appendAudioInitSegment_ = true;

        // reset gop buffer on timestampoffset as this signals a change in timeline
        this.gopBuffer_.length = 0;
        this.timeMapping_ = 0;

        // We have to tell the transmuxer to set the baseMediaDecodeTime to
        // the desired timestampOffset for the next segment
        this.transmuxer_.postMessage({
          action: 'setTimestampOffset',
          timestampOffset: val
        });
      }
    }
  });

  // setting the append window affects both source buffers
  Object.defineProperty(_this, 'appendWindowStart', {
    get: function get$$1() {
      return (this.videoBuffer_ || this.audioBuffer_).appendWindowStart;
    },
    set: function set$$1(start) {
      if (this.videoBuffer_) {
        this.videoBuffer_.appendWindowStart = start;
      }
      if (this.audioBuffer_) {
        this.audioBuffer_.appendWindowStart = start;
      }
    }
  });

  // this buffer is "updating" if either of its native buffers are
  Object.defineProperty(_this, 'updating', {
    get: function get$$1() {
      return !!(this.bufferUpdating_ || !this.audioDisabled_ && this.audioBuffer_ && this.audioBuffer_.updating || this.videoBuffer_ && this.videoBuffer_.updating);
    }
  });

  // the buffered property is the intersection of the buffered
  // ranges of the native source buffers
  Object.defineProperty(_this, 'buffered', {
    get: function get$$1() {
      return buffered(this.videoBuffer_, this.audioBuffer_, this.audioDisabled_);
    }
  });
  return _this;
}
8799
8800 /**
8801 * When we get a data event from the transmuxer
8802 * we call this function and handle the data that
8803 * was sent to us
8804 *
8805 * @private
8806 * @param {Event} event the data event from the transmuxer
8807 */
8808
8809
8810 createClass(VirtualSourceBuffer, [{
8811 key: 'data_',
8812 value: function data_(event) {
8813 var segment = event.data.segment;
8814
8815 // Cast ArrayBuffer to TypedArray
8816 segment.data = new Uint8Array(segment.data, event.data.byteOffset, event.data.byteLength);
8817
8818 segment.initSegment = new Uint8Array(segment.initSegment.data, segment.initSegment.byteOffset, segment.initSegment.byteLength);
8819
8820 createTextTracksIfNecessary(this, this.mediaSource_, segment);
8821
8822 // Add the segments to the pendingBuffers array
8823 this.pendingBuffers_.push(segment);
8824 return;
8825 }
8826
8827 /**
8828 * When we get a done event from the transmuxer
8829 * we call this function and we process all
8830 * of the pending data that we have been saving in the
8831 * data_ function
8832 *
8833 * @private
8834 * @param {Event} event the done event from the transmuxer
8835 */
8836
8837 }, {
8838 key: 'done_',
8839 value: function done_(event) {
8840 // Don't process and append data if the mediaSource is closed
8841 if (this.mediaSource_.readyState === 'closed') {
8842 this.pendingBuffers_.length = 0;
8843 return;
8844 }
8845
8846 // All buffers should have been flushed from the muxer
8847 // start processing anything we have received
8848 this.processPendingSegments_();
8849 return;
8850 }
8851 }, {
8852 key: 'videoSegmentTimingInfo_',
8853 value: function videoSegmentTimingInfo_(timingInfo) {
8854 var timingInfoInSeconds = {
8855 start: {
8856 decode: timingInfo.start.dts / ONE_SECOND_IN_TS,
8857 presentation: timingInfo.start.pts / ONE_SECOND_IN_TS
8858 },
8859 end: {
8860 decode: timingInfo.end.dts / ONE_SECOND_IN_TS,
8861 presentation: timingInfo.end.pts / ONE_SECOND_IN_TS
8862 },
8863 baseMediaDecodeTime: timingInfo.baseMediaDecodeTime / ONE_SECOND_IN_TS
8864 };
8865
8866 if (timingInfo.prependedContentDuration) {
8867 timingInfoInSeconds.prependedContentDuration = timingInfo.prependedContentDuration / ONE_SECOND_IN_TS;
8868 }
8869
8870 this.trigger({
8871 type: 'videoSegmentTimingInfo',
8872 videoSegmentTimingInfo: timingInfoInSeconds
8873 });
8874 }
8875
8876 /**
8877 * Create our internal native audio/video source buffers and add
8878 * event handlers to them with the following conditions:
8879 * 1. they do not already exist on the mediaSource
8880 * 2. this VSB has a codec for them
8881 *
8882 * @private
8883 */
8884
8885 }, {
8886 key: 'createRealSourceBuffers_',
8887 value: function createRealSourceBuffers_() {
8888 var _this2 = this;
8889
8890 var types = ['audio', 'video'];
8891
8892 types.forEach(function (type) {
8893 // Don't create a SourceBuffer of this type if we don't have a
8894 // codec for it
8895 if (!_this2[type + 'Codec_']) {
8896 return;
8897 }
8898
8899 // Do nothing if a SourceBuffer of this type already exists
8900 if (_this2[type + 'Buffer_']) {
8901 return;
8902 }
8903
8904 var buffer = null;
8905
8906 // If the mediasource already has a SourceBuffer for the codec
8907 // use that
8908 if (_this2.mediaSource_[type + 'Buffer_']) {
8909 buffer = _this2.mediaSource_[type + 'Buffer_'];
8910 // In multiple audio track cases, the audio source buffer is disabled
8911 // on the main VirtualSourceBuffer by the HTMLMediaSource much earlier
8912 // than createRealSourceBuffers_ is called to create the second
8913 // VirtualSourceBuffer because that happens as a side-effect of
8914 // videojs-contrib-hls starting the audioSegmentLoader. As a result,
8915 // the audioBuffer is essentially "ownerless" and no one will toggle
8916 // the `updating` state back to false once the `updateend` event is received
8917 //
8918 // Setting `updating` to false manually will work around this
8919 // situation and allow work to continue
8920 buffer.updating = false;
8921 } else {
8922 var codecProperty = type + 'Codec_';
8923 var mimeType = type + '/mp4;codecs="' + _this2[codecProperty] + '"';
8924
8925 buffer = makeWrappedSourceBuffer(_this2.mediaSource_.nativeMediaSource_, mimeType);
8926
8927 _this2.mediaSource_[type + 'Buffer_'] = buffer;
8928 }
8929
8930 _this2[type + 'Buffer_'] = buffer;
8931
8932 // Wire up the events to the SourceBuffer
8933 ['update', 'updatestart', 'updateend'].forEach(function (event) {
8934 buffer.addEventListener(event, function () {
8935 // if audio is disabled
8936 if (type === 'audio' && _this2.audioDisabled_) {
8937 return;
8938 }
8939
8940 if (event === 'updateend') {
8941 _this2[type + 'Buffer_'].updating = false;
8942 }
8943
8944 var shouldTrigger = types.every(function (t) {
8945 // skip checking audio's updating status if audio
8946 // is not enabled
8947 if (t === 'audio' && _this2.audioDisabled_) {
8948 return true;
8949 }
8950 // if the other type is updating we don't trigger
8951 if (type !== t && _this2[t + 'Buffer_'] && _this2[t + 'Buffer_'].updating) {
8952 return false;
8953 }
8954 return true;
8955 });
8956
8957 if (shouldTrigger) {
8958 return _this2.trigger(event);
8959 }
8960 });
8961 });
8962 });
8963 }
8964
8965 /**
8966 * Emulate the native mediasource function, but our function will
8967 * send all of the proposed segments to the transmuxer so that we
8968 * can transmux them before we append them to our internal
8969 * native source buffers in the correct format.
8970 *
8971 * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/appendBuffer
8972 * @param {Uint8Array} segment the segment to append to the buffer
8973 */
8974
8975 }, {
8976 key: 'appendBuffer',
8977 value: function appendBuffer(segment) {
8978 // Start the internal "updating" state
8979 this.bufferUpdating_ = true;
8980
8981 if (this.audioBuffer_ && this.audioBuffer_.buffered.length) {
8982 var audioBuffered = this.audioBuffer_.buffered;
8983
8984 this.transmuxer_.postMessage({
8985 action: 'setAudioAppendStart',
8986 appendStart: audioBuffered.end(audioBuffered.length - 1)
8987 });
8988 }
8989
8990 if (this.videoBuffer_) {
8991 this.transmuxer_.postMessage({
8992 action: 'alignGopsWith',
8993 gopsToAlignWith: gopsSafeToAlignWith(this.gopBuffer_, this.mediaSource_.player_ ? this.mediaSource_.player_.currentTime() : null, this.timeMapping_)
8994 });
8995 }
8996
8997 this.transmuxer_.postMessage({
8998 action: 'push',
8999 // Send the typed-array of data as an ArrayBuffer so that
9000 // it can be sent as a "Transferable" and avoid the costly
9001 // memory copy
9002 data: segment.buffer,
9003
9004 // To recreate the original typed-array, we need information
9005 // about what portion of the ArrayBuffer it was a view into
9006 byteOffset: segment.byteOffset,
9007 byteLength: segment.byteLength
9008 }, [segment.buffer]);
9009 this.transmuxer_.postMessage({ action: 'flush' });
9010 }
9011
9012 /**
9013 * Appends gop information (timing and byteLength) received by the transmuxer for the
9014 * gops appended in the last call to appendBuffer
9015 *
9016 * @param {Event} event
9017 * The gopInfo event from the transmuxer
9018 * @param {Array} event.data.gopInfo
9019 * List of gop info to append
9020 */
9021
9022 }, {
9023 key: 'appendGopInfo_',
9024 value: function appendGopInfo_(event) {
9025 this.gopBuffer_ = updateGopBuffer(this.gopBuffer_, event.data.gopInfo, this.safeAppend_);
9026 }
9027
9028 /**
9029 * Emulate the native mediasource function and remove parts
9030 * of the buffer from any of our internal buffers that exist
9031 *
9032 * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/remove
9033 * @param {Double} start position to start the remove at
9034 * @param {Double} end position to end the remove at
9035 */
9036
9037 }, {
9038 key: 'remove',
9039 value: function remove(start, end) {
9040 if (this.videoBuffer_) {
9041 this.videoBuffer_.updating = true;
9042 this.videoBuffer_.remove(start, end);
9043 this.gopBuffer_ = removeGopBuffer(this.gopBuffer_, start, end, this.timeMapping_);
9044 }
9045 if (!this.audioDisabled_ && this.audioBuffer_) {
9046 this.audioBuffer_.updating = true;
9047 this.audioBuffer_.remove(start, end);
9048 }
9049
9050 // Remove Metadata Cues (id3)
9051 removeCuesFromTrack(start, end, this.metadataTrack_);
9052
9053 // Remove Any Captions
9054 if (this.inbandTextTracks_) {
9055 for (var track in this.inbandTextTracks_) {
9056 removeCuesFromTrack(start, end, this.inbandTextTracks_[track]);
9057 }
9058 }
9059 }
9060
9061 /**
9062 * Process any segments that the muxer has output
9063 * Concatenate segments together based on type and append them into
9064 * their respective sourceBuffers
9065 *
9066 * @private
9067 */
9068
9069 }, {
9070 key: 'processPendingSegments_',
9071 value: function processPendingSegments_() {
9072 var sortedSegments = {
9073 video: {
9074 segments: [],
9075 bytes: 0
9076 },
9077 audio: {
9078 segments: [],
9079 bytes: 0
9080 },
9081 captions: [],
9082 metadata: []
9083 };
9084
9085 if (!this.pendingBuffers_.length) {
9086 // We are no longer in the internal "updating" state
9087 this.trigger('updateend');
9088 this.bufferUpdating_ = false;
9089 return;
9090 }
9091
9092 // Sort segments into separate video/audio arrays and
9093 // keep track of their total byte lengths
9094 sortedSegments = this.pendingBuffers_.reduce(function (segmentObj, segment) {
9095 var type = segment.type;
9096 var data = segment.data;
9097 var initSegment = segment.initSegment;
9098
9099 segmentObj[type].segments.push(data);
9100 segmentObj[type].bytes += data.byteLength;
9101
9102 segmentObj[type].initSegment = initSegment;
9103
9104 // Gather any captions into a single array
9105 if (segment.captions) {
9106 segmentObj.captions = segmentObj.captions.concat(segment.captions);
9107 }
9108
9109 if (segment.info) {
9110 segmentObj[type].info = segment.info;
9111 }
9112
9113 // Gather any metadata into a single array
9114 if (segment.metadata) {
9115 segmentObj.metadata = segmentObj.metadata.concat(segment.metadata);
9116 }
9117
9118 return segmentObj;
9119 }, sortedSegments);
9120
9121 // Create the real source buffers if they don't exist by now since we
9122 // finally are sure what tracks are contained in the source
9123 if (!this.videoBuffer_ && !this.audioBuffer_) {
9124 // Remove any codecs that may have been specified by default but
9125 // are no longer applicable now
9126 if (sortedSegments.video.bytes === 0) {
9127 this.videoCodec_ = null;
9128 }
9129 if (sortedSegments.audio.bytes === 0) {
9130 this.audioCodec_ = null;
9131 }
9132
9133 this.createRealSourceBuffers_();
9134 }
9135
9136 if (sortedSegments.audio.info) {
9137 this.mediaSource_.trigger({ type: 'audioinfo', info: sortedSegments.audio.info });
9138 }
9139 if (sortedSegments.video.info) {
9140 this.mediaSource_.trigger({ type: 'videoinfo', info: sortedSegments.video.info });
9141 }
9142
9143 if (this.appendAudioInitSegment_) {
9144 if (!this.audioDisabled_ && this.audioBuffer_) {
9145 sortedSegments.audio.segments.unshift(sortedSegments.audio.initSegment);
9146 sortedSegments.audio.bytes += sortedSegments.audio.initSegment.byteLength;
9147 }
9148 this.appendAudioInitSegment_ = false;
9149 }
9150
9151 var triggerUpdateend = false;
9152
9153 // Merge multiple video and audio segments into one and append
9154 if (this.videoBuffer_ && sortedSegments.video.bytes) {
9155 sortedSegments.video.segments.unshift(sortedSegments.video.initSegment);
9156 sortedSegments.video.bytes += sortedSegments.video.initSegment.byteLength;
9157 this.concatAndAppendSegments_(sortedSegments.video, this.videoBuffer_);
9158 } else if (this.videoBuffer_ && (this.audioDisabled_ || !this.audioBuffer_)) {
9159 // The transmuxer did not return any bytes of video, meaning it was all trimmed
9160 // for gop alignment. Since we have a video buffer and audio is disabled, updateend
9161 // will never be triggered by this source buffer, which will cause contrib-hls
9162 // to be stuck forever waiting for updateend. If audio is not disabled, updateend
9163 // will be triggered by the audio buffer, which will be sent upwards since the video
9164 // buffer will not be in an updating state.
9165 triggerUpdateend = true;
9166 }
9167
9168 // Add text-track data for all
9169 addTextTrackData(this, sortedSegments.captions, sortedSegments.metadata);
9170
9171 if (!this.audioDisabled_ && this.audioBuffer_) {
9172 this.concatAndAppendSegments_(sortedSegments.audio, this.audioBuffer_);
9173 }
9174
9175 this.pendingBuffers_.length = 0;
9176
9177 if (triggerUpdateend) {
9178 this.trigger('updateend');
9179 }
9180
9181 // We are no longer in the internal "updating" state
9182 this.bufferUpdating_ = false;
9183 }
9184
9185 /**
9186 * Combine all segments into a single Uint8Array and then append them
9187 * to the destination buffer
9188 *
9189 * @param {Object} segmentObj
9190 * @param {SourceBuffer} destinationBuffer native source buffer to append data to
9191 * @private
9192 */
9193
9194 }, {
9195 key: 'concatAndAppendSegments_',
9196 value: function concatAndAppendSegments_(segmentObj, destinationBuffer) {
9197 var offset = 0;
9198 var tempBuffer = void 0;
9199
9200 if (segmentObj.bytes) {
9201 tempBuffer = new Uint8Array(segmentObj.bytes);
9202
9203 // Combine the individual segments into one large typed-array
9204 segmentObj.segments.forEach(function (segment) {
9205 tempBuffer.set(segment, offset);
9206 offset += segment.byteLength;
9207 });
9208
9209 try {
9210 destinationBuffer.updating = true;
9211 destinationBuffer.appendBuffer(tempBuffer);
9212 } catch (error) {
9213 if (this.mediaSource_.player_) {
9214 this.mediaSource_.player_.error({
9215 code: -3,
9216 type: 'APPEND_BUFFER_ERR',
9217 message: error.message,
9218 originalError: error
9219 });
9220 }
9221 }
9222 }
9223 }
9224
9225 /**
9226 * Emulate the native mediasource function. abort any soureBuffer
9227 * actions and throw out any un-appended data.
9228 *
9229 * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/abort
9230 */
9231
9232 }, {
9233 key: 'abort',
9234 value: function abort() {
9235 if (this.videoBuffer_) {
9236 this.videoBuffer_.abort();
9237 }
9238 if (!this.audioDisabled_ && this.audioBuffer_) {
9239 this.audioBuffer_.abort();
9240 }
9241 if (this.transmuxer_) {
9242 this.transmuxer_.postMessage({ action: 'reset' });
9243 }
9244 this.pendingBuffers_.length = 0;
9245 this.bufferUpdating_ = false;
9246 }
9247 }, {
9248 key: 'dispose',
9249 value: function dispose() {
9250 if (this.transmuxer_) {
9251 this.transmuxer_.terminate();
9252 }
9253 this.trigger('dispose');
9254 this.off();
9255 }
9256 }]);
9257 return VirtualSourceBuffer;
9258}(videojs.EventTarget);
9259
9260/**
9261 * @file html-media-source.js
9262 */
9263
9264/**
9265 * Our MediaSource implementation in HTML, mimics native
9266 * MediaSource where/if possible.
9267 *
9268 * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource
9269 * @class HtmlMediaSource
9270 * @extends videojs.EventTarget
9271 */
9272
var HtmlMediaSource = function (_videojs$EventTarget) {
  inherits(HtmlMediaSource, _videojs$EventTarget);

  function HtmlMediaSource() {
    classCallCheck(this, HtmlMediaSource);

    var _this = possibleConstructorReturn(this, (HtmlMediaSource.__proto__ || Object.getPrototypeOf(HtmlMediaSource)).call(this));

    var property = void 0;

    _this.nativeMediaSource_ = new window$1.MediaSource();
    // delegate to the native MediaSource's methods by default
    for (property in _this.nativeMediaSource_) {
      if (!(property in HtmlMediaSource.prototype) && typeof _this.nativeMediaSource_[property] === 'function') {
        _this[property] = _this.nativeMediaSource_[property].bind(_this.nativeMediaSource_);
      }
    }

    // emulate `duration` and `seekable` until seeking can be
    // handled uniformly for live streams
    // see https://github.com/w3c/media-source/issues/5
    _this.duration_ = NaN;
    Object.defineProperty(_this, 'duration', {
      get: function get$$1() {
        if (this.duration_ === Infinity) {
          return this.duration_;
        }
        return this.nativeMediaSource_.duration;
      },
      set: function set$$1(duration) {
        this.duration_ = duration;
        // Infinity (live) is only tracked locally; the native duration is
        // grown incrementally via addSeekableRange_ instead
        if (duration !== Infinity) {
          this.nativeMediaSource_.duration = duration;
          return;
        }
      }
    });
    Object.defineProperty(_this, 'seekable', {
      get: function get$$1() {
        if (this.duration_ === Infinity) {
          return videojs.createTimeRanges([[0, this.nativeMediaSource_.duration]]);
        }
        return this.nativeMediaSource_.seekable;
      }
    });

    Object.defineProperty(_this, 'readyState', {
      get: function get$$1() {
        return this.nativeMediaSource_.readyState;
      }
    });

    Object.defineProperty(_this, 'activeSourceBuffers', {
      get: function get$$1() {
        return this.activeSourceBuffers_;
      }
    });

    // the list of virtual and native SourceBuffers created by this
    // MediaSource
    _this.sourceBuffers = [];

    _this.activeSourceBuffers_ = [];

    /**
     * update the list of active source buffers based upon various
     * information from HLS and video.js
     *
     * @private
     */
    _this.updateActiveSourceBuffers_ = function () {
      // Retain the reference but empty the array
      _this.activeSourceBuffers_.length = 0;

      // If there is only one source buffer, then it will always be active and audio will
      // be disabled based on the codec of the source buffer
      if (_this.sourceBuffers.length === 1) {
        var sourceBuffer = _this.sourceBuffers[0];

        sourceBuffer.appendAudioInitSegment_ = true;
        sourceBuffer.audioDisabled_ = !sourceBuffer.audioCodec_;
        _this.activeSourceBuffers_.push(sourceBuffer);
        return;
      }

      // There are 2 source buffers, a combined (possibly video only) source buffer
      // and an audio only source buffer.
      // By default, the audio in the combined virtual source buffer is enabled
      // and the audio-only source buffer (if it exists) is disabled.
      var disableCombined = false;
      var disableAudioOnly = true;

      // TODO: maybe we can store the sourcebuffers on the track objects?
      // safari may do something like this
      for (var i = 0; i < _this.player_.audioTracks().length; i++) {
        var track = _this.player_.audioTracks()[i];

        if (track.enabled && track.kind !== 'main') {
          // The enabled track is an alternate audio track so disable the audio in
          // the combined source buffer and enable the audio-only source buffer.
          disableCombined = true;
          disableAudioOnly = false;
          break;
        }
      }

      _this.sourceBuffers.forEach(function (sourceBuffer, index) {
        /* eslinst-disable */
        // TODO once codecs are required, we can switch to using the codecs to determine
        // what stream is the video stream, rather than relying on videoTracks
        /* eslinst-enable */

        sourceBuffer.appendAudioInitSegment_ = true;

        if (sourceBuffer.videoCodec_ && sourceBuffer.audioCodec_) {
          // combined
          sourceBuffer.audioDisabled_ = disableCombined;
        } else if (sourceBuffer.videoCodec_ && !sourceBuffer.audioCodec_) {
          // If the "combined" source buffer is video only, then we do not want
          // disable the audio-only source buffer (this is mostly for demuxed
          // audio and video hls)
          sourceBuffer.audioDisabled_ = true;
          disableAudioOnly = false;
        } else if (!sourceBuffer.videoCodec_ && sourceBuffer.audioCodec_) {
          // audio only
          // In the case of audio only with alternate audio and disableAudioOnly is true
          // this means we want to disable the audio on the alternate audio sourcebuffer
          // but not the main "combined" source buffer. The "combined" source buffer is
          // always at index 0, so this ensures audio won't be disabled in both source
          // buffers.
          sourceBuffer.audioDisabled_ = index ? disableAudioOnly : !disableAudioOnly;
          if (sourceBuffer.audioDisabled_) {
            return;
          }
        }

        _this.activeSourceBuffers_.push(sourceBuffer);
      });
    };

    // a rendition change requires re-appending init segments
    _this.onPlayerMediachange_ = function () {
      _this.sourceBuffers.forEach(function (sourceBuffer) {
        sourceBuffer.appendAudioInitSegment_ = true;
      });
    };

    // clear caption state in the transmuxers after a buffer flush
    _this.onHlsReset_ = function () {
      _this.sourceBuffers.forEach(function (sourceBuffer) {
        if (sourceBuffer.transmuxer_) {
          sourceBuffer.transmuxer_.postMessage({ action: 'resetCaptions' });
        }
      });
    };

    // propagate the stream-to-display time mapping to every buffer
    _this.onHlsSegmentTimeMapping_ = function (event) {
      _this.sourceBuffers.forEach(function (buffer) {
        return buffer.timeMapping_ = event.mapping;
      });
    };

    // Re-emit MediaSource events on the polyfill
    ['sourceopen', 'sourceclose', 'sourceended'].forEach(function (eventName) {
      this.nativeMediaSource_.addEventListener(eventName, this.trigger.bind(this));
    }, _this);

    // capture the associated player when the MediaSource is
    // successfully attached
    _this.on('sourceopen', function (event) {
      // Get the player this MediaSource is attached to
      var video = document.querySelector('[src="' + _this.url_ + '"]');

      if (!video) {
        return;
      }

      _this.player_ = videojs(video.parentNode);

      if (!_this.player_) {
        return;
      }

      // hls-reset is fired by videojs.Hls on to the tech after the main SegmentLoader
      // resets its state and flushes the buffer
      _this.player_.tech_.on('hls-reset', _this.onHlsReset_);
      // hls-segment-time-mapping is fired by videojs.Hls on to the tech after the main
      // SegmentLoader inspects an MTS segment and has an accurate stream to display
      // time mapping
      _this.player_.tech_.on('hls-segment-time-mapping', _this.onHlsSegmentTimeMapping_);

      if (_this.player_.audioTracks && _this.player_.audioTracks()) {
        _this.player_.audioTracks().on('change', _this.updateActiveSourceBuffers_);
        _this.player_.audioTracks().on('addtrack', _this.updateActiveSourceBuffers_);
        _this.player_.audioTracks().on('removetrack', _this.updateActiveSourceBuffers_);
      }

      _this.player_.on('mediachange', _this.onPlayerMediachange_);
    });

    // extend the final metadata cue to the full video duration once the
    // stream has ended
    _this.on('sourceended', function (event) {
      var duration = durationOfVideo(_this.duration);

      for (var i = 0; i < _this.sourceBuffers.length; i++) {
        var sourcebuffer = _this.sourceBuffers[i];
        var cues = sourcebuffer.metadataTrack_ && sourcebuffer.metadataTrack_.cues;

        if (cues && cues.length) {
          cues[cues.length - 1].endTime = duration;
        }
      }
    });

    // explicitly terminate any WebWorkers that were created
    // by SourceHandlers
    _this.on('sourceclose', function (event) {
      this.sourceBuffers.forEach(function (sourceBuffer) {
        if (sourceBuffer.transmuxer_) {
          sourceBuffer.transmuxer_.terminate();
        }
      });

      this.sourceBuffers.length = 0;
      if (!this.player_) {
        return;
      }

      if (this.player_.audioTracks && this.player_.audioTracks()) {
        this.player_.audioTracks().off('change', this.updateActiveSourceBuffers_);
        this.player_.audioTracks().off('addtrack', this.updateActiveSourceBuffers_);
        this.player_.audioTracks().off('removetrack', this.updateActiveSourceBuffers_);
      }

      // We can only change this if the player hasn't been disposed of yet
      // because `off` eventually tries to use the el_ property. If it has
      // been disposed of, then don't worry about it because there are no
      // event handlers left to unbind anyway
      if (this.player_.el_) {
        this.player_.off('mediachange', this.onPlayerMediachange_);
      }

      if (this.player_.tech_ && this.player_.tech_.el_) {
        this.player_.tech_.off('hls-reset', this.onHlsReset_);
        this.player_.tech_.off('hls-segment-time-mapping', this.onHlsSegmentTimeMapping_);
      }
    });
    return _this;
  }

  /**
   * Add a range that can now be seeked to.
   *
   * @param {Double} start where to start the addition
   * @param {Double} end where to end the addition
   * @private
   */


  createClass(HtmlMediaSource, [{
    key: 'addSeekableRange_',
    value: function addSeekableRange_(start, end) {
      var error = void 0;

      // only meaningful for live (Infinity-duration) streams
      if (this.duration !== Infinity) {
        error = new Error('MediaSource.addSeekableRange() can only be invoked ' + 'when the duration is Infinity');
        error.name = 'InvalidStateError';
        error.code = 11;
        throw error;
      }

      if (end > this.nativeMediaSource_.duration || isNaN(this.nativeMediaSource_.duration)) {
        this.nativeMediaSource_.duration = end;
      }
    }

    /**
     * Add a source buffer to the media source.
     *
     * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/addSourceBuffer
     * @param {String} type the content-type of the content
     * @return {Object} the created source buffer
     */

  }, {
    key: 'addSourceBuffer',
    value: function addSourceBuffer(type) {
      var buffer = void 0;
      var parsedType = parseContentType(type);

      // Create a VirtualSourceBuffer to transmux MPEG-2 transport
      // stream segments into fragmented MP4s
      if (/^(video|audio)\/mp2t$/i.test(parsedType.type)) {
        var codecs = [];

        if (parsedType.parameters && parsedType.parameters.codecs) {
          codecs = parsedType.parameters.codecs.split(',');
          codecs = translateLegacyCodecs(codecs);
          codecs = codecs.filter(function (codec) {
            return isAudioCodec(codec) || isVideoCodec(codec);
          });
        }

        // fall back to H.264 Main + AAC-LC when no usable codecs were declared
        if (codecs.length === 0) {
          codecs = ['avc1.4d400d', 'mp4a.40.2'];
        }

        buffer = new VirtualSourceBuffer(this, codecs);

        if (this.sourceBuffers.length !== 0) {
          // If another VirtualSourceBuffer already exists, then we are creating a
          // SourceBuffer for an alternate audio track and therefore we know that
          // the source has both an audio and video track.
          // That means we should trigger the manual creation of the real
          // SourceBuffers instead of waiting for the transmuxer to return data
          this.sourceBuffers[0].createRealSourceBuffers_();
          buffer.createRealSourceBuffers_();

          // Automatically disable the audio on the first source buffer if
          // a second source buffer is ever created
          this.sourceBuffers[0].audioDisabled_ = true;
        }
      } else {
        // delegate to the native implementation
        buffer = this.nativeMediaSource_.addSourceBuffer(type);
      }

      this.sourceBuffers.push(buffer);
      return buffer;
    }

    /**
     * Dispose all owned source buffers and remove all event listeners.
     */

  }, {
    key: 'dispose',
    value: function dispose() {
      this.trigger('dispose');
      this.off();

      this.sourceBuffers.forEach(function (buffer) {
        if (buffer.dispose) {
          buffer.dispose();
        }
      });

      this.sourceBuffers.length = 0;
    }
  }]);
  return HtmlMediaSource;
}(videojs.EventTarget);
9617
9618/**
9619 * @file videojs-contrib-media-sources.js
9620 */
// counter used to generate unique emulated object URLs
var urlCount = 0;

// ------------
// Media Source
// ------------

// store references to the media sources so they can be connected
// to a video element (a swf object)
// TODO: can we store this somewhere local to this module?
videojs.mediaSources = {};
9631
/**
 * Provide a method for a swf object to notify JS that a
 * media source is now open.
 *
 * @param {String} msObjectURL string referencing the MSE Object URL
 * @param {String} swfId the swf id
 * @throws {Error} when no media source is registered for the given URL
 */
var open = function open(msObjectURL, swfId) {
  var mediaSource = videojs.mediaSources[msObjectURL];

  // guard clause: an unknown URL is a programming error upstream
  if (!mediaSource) {
    throw new Error('Media Source not found (Video.js)');
  }

  mediaSource.trigger({ type: 'sourceopen', swfId: swfId });
};
9648
/**
 * Check to see if the native MediaSource object exists and supports
 * an MP4 container with both H.264 video and AAC-LC audio.
 *
 * @return {Boolean} if native media sources are supported
 */
var supportsNativeMediaSources = function supportsNativeMediaSources() {
  var nativeMediaSource = window$1.MediaSource;

  if (!nativeMediaSource || !nativeMediaSource.isTypeSupported) {
    return false;
  }

  return nativeMediaSource.isTypeSupported('video/mp4;codecs="avc1.4d400d,mp4a.40.2"');
};
9658
/**
 * An emulation of the MediaSource API so that we can support
 * native and non-native functionality. returns an instance of
 * HtmlMediaSource.
 *
 * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/MediaSource
 * @return {HtmlMediaSource} a wrapper around the native MediaSource
 * @throws {Error} when native media sources are not supported
 */
var MediaSource = function MediaSource() {
  this.MediaSource = {
    open: open,
    supportsNativeMediaSources: supportsNativeMediaSources
  };

  if (supportsNativeMediaSources()) {
    return new HtmlMediaSource();
  }

  // Native MSE is required — there is no non-native fallback.
  // (fixed garbled wording: previously "Cannot use create a virtual MediaSource")
  throw new Error('Cannot create a virtual MediaSource for this video');
};
9678
// expose the helpers as statics so consumers don't need a constructed instance
MediaSource.open = open;
MediaSource.supportsNativeMediaSources = supportsNativeMediaSources;
9681
/**
 * A wrapper around the native URL for our MSE object
 * implementation, this object is exposed under videojs.URL
 *
 * @link https://developer.mozilla.org/en-US/docs/Web/API/URL/URL
 */
var URL$1 = {
  /**
   * A wrapper around the native createObjectURL for our objects.
   * This function maps a native or emulated mediaSource to a blob
   * url so that it can be loaded into video.js
   *
   * @link https://developer.mozilla.org/en-US/docs/Web/API/URL/createObjectURL
   * @param {MediaSource} object the object to create a blob url to
   * @return {String} the blob url, also recorded on `object.url_`
   */
  createObjectURL: function createObjectURL(object) {
    var url = void 0;

    // Fixed: the original pair of complementary `instanceof HtmlMediaSource`
    // checks both returned, so the emulated `blob:vjs-media-source/<n>`
    // mapping below them (a Flash-era leftover using urlCount and
    // videojs.mediaSources) was unreachable dead code and has been removed.

    // use the native MediaSource to generate an object URL
    if (object instanceof HtmlMediaSource) {
      url = window$1.URL.createObjectURL(object.nativeMediaSource_);
      object.url_ = url;
      return url;
    }

    // if the object isn't an emulated MediaSource, delegate to the
    // native implementation
    url = window$1.URL.createObjectURL(object);
    object.url_ = url;
    return url;
  }
};
9727
// expose the (emulated) MediaSource constructor and the URL wrapper on the
// global videojs object so players and plugins can reach them
videojs.MediaSource = MediaSource;
videojs.URL = URL$1;
9730
// local aliases for the video.js event-target base class and options-merging
// utility used by the DASH playlist loader below
var EventTarget$1 = videojs.EventTarget,
    mergeOptions$2 = videojs.mergeOptions;
9733
/**
 * Returns a new master manifest that is the result of merging an updated master manifest
 * into the original version.
 *
 * @param {Object} oldMaster
 *        The old parsed mpd object
 * @param {Object} newMaster
 *        The updated parsed mpd object
 * @return {Object|null}
 *         A new object representing the original master manifest with the updated media
 *         playlists merged in, or null when nothing changed
 */
var updateMaster$1 = function updateMaster$$1(oldMaster, newMaster) {
  // Track whether ANY playlist produced an update. The previous
  // implementation set a `noChanges` flag to true whenever a single playlist
  // was unchanged, which could cause the whole refresh to be discarded
  // (return null) even though other playlists HAD changed.
  var anyChanges = false;
  var update = mergeOptions$2(oldMaster, {
    // These are top level properties that can be updated
    duration: newMaster.duration,
    minimumUpdatePeriod: newMaster.minimumUpdatePeriod
  });

  // First update the playlists in the playlist list
  for (var i = 0; i < newMaster.playlists.length; i++) {
    var playlistUpdate = updateMaster(update, newMaster.playlists[i]);

    if (playlistUpdate) {
      update = playlistUpdate;
      anyChanges = true;
    }
  }

  // Then update media group playlists
  forEachMediaGroup(newMaster, function (properties, type, group, label) {
    if (properties.playlists && properties.playlists.length) {
      var id = properties.playlists[0].id;
      var groupPlaylistUpdate = updateMaster(update, properties.playlists[0]);

      if (groupPlaylistUpdate) {
        update = groupPlaylistUpdate;
        // update the playlist reference within media groups
        update.mediaGroups[type][group][label].playlists[0] = update.playlists[id];
        anyChanges = true;
      }
    }
  });

  // signal "nothing changed" to callers with null
  return anyChanges ? update : null;
};
9787
/**
 * Builds a stable lookup key for a SIDX reference from its URI and byterange.
 *
 * @param {Object} sidxInfo sidx descriptor with `uri` and `byterange`
 * @return {String} key of the form `<uri>-<firstByte>-<lastByte>`
 */
var generateSidxKey = function generateSidxKey(sidxInfo) {
  var byterange = sidxInfo.byterange;
  // the byterange end is non-inclusive, so the last byte is offset+length-1
  var lastByte = byterange.offset + byterange.length - 1;

  return [sidxInfo.uri, byterange.offset, lastByte].join('-');
};
9794
// Two SIDX references are equivalent when their URIs and byteranges match.
// Their init-segment maps must also agree: both absent counts as a match,
// exactly one present does not, and both present must share a byterange.
var equivalentSidx = function equivalentSidx(a, b) {
  if (a.uri !== b.uri ||
      a.byterange.offset !== b.byterange.offset ||
      a.byterange.length !== b.byterange.length) {
    return false;
  }

  // neither side has an init-segment map
  if (!a.map && !b.map) {
    return true;
  }

  // both maps must exist and cover the same byterange
  return Boolean(a.map && b.map &&
    a.map.byterange.offset === b.map.byterange.offset &&
    a.map.byterange.length === b.map.byterange.length);
};
9806
// exported for testing
//
// Builds a new sidx mapping containing only the entries of oldSidxMapping
// that are still referenced, unchanged, by the given playlists.
// NOTE(review): the `break` below stops scanning the remaining playlists as
// soon as one key is missing from the old mapping — presumably intentional
// (matches the original), but worth confirming against callers.
var compareSidxEntry = function compareSidxEntry(playlists, oldSidxMapping) {
  var retained = {};

  for (var id in playlists) {
    var sidxInfo = playlists[id].sidx;

    if (!sidxInfo) {
      continue;
    }

    var key = generateSidxKey(sidxInfo);
    var oldEntry = oldSidxMapping[key];

    if (!oldEntry) {
      break;
    }

    if (equivalentSidx(oldEntry.sidxInfo, sidxInfo)) {
      retained[key] = oldEntry;
    }
  }

  return retained;
};
9832
/**
 * A function that filters out changed items as they need to be requested separately.
 *
 * The method is exported for testing
 *
 * @param {Object} masterXml the mpd XML
 * @param {string} srcUrl the mpd url
 * @param {Date} clientOffset a time difference between server and client (passed through and not used)
 * @param {Object} oldSidxMapping the SIDX to compare against
 * @return {Object} the subset of oldSidxMapping that is still valid
 */
var filterChangedSidxMappings = function filterChangedSidxMappings(masterXml, srcUrl, clientOffset, oldSidxMapping) {
  // Re-parse without passing the current sidx mapping
  var master = parse(masterXml, {
    manifestUri: srcUrl,
    clientOffset: clientOffset
  });

  // start with the entries still valid for the top-level (video) playlists...
  var result = compareSidxEntry(master.playlists, oldSidxMapping);

  // ...then fold in the entries still valid for every media group playlist
  forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
    if (properties.playlists && properties.playlists.length) {
      result = mergeOptions$2(result, compareSidxEntry(properties.playlists, oldSidxMapping));
    }
  });

  return result;
};
9863
// exported for testing
//
/**
 * Issues an XHR for the sidx box described by sidxRange and returns the
 * in-flight request.
 *
 * @param {Object} sidxRange segment-like object with `resolvedUri` and `byterange`
 * @param {Object} playlist the playlist the sidx belongs to
 * @param {Function} xhr the xhr factory to use
 * @param {Object} options loader options (handleManifestRedirects, etc.)
 * @param {Function} finishProcessingFn callback invoked with (error, request)
 * @return {Object} the in-flight xhr request
 */
var requestSidx_ = function requestSidx_(sidxRange, playlist, xhr, options, finishProcessingFn) {
  var sidxInfo = {
    // The previous code called
    // resolveManifestRedirect(options.handleManifestRedirects, sidxRange.resolvedUri)
    // without a `req` argument; resolveManifestRedirect dereferences
    // req.responseURL when the flag is truthy, so enabling
    // handleManifestRedirects threw a TypeError. No request (and therefore no
    // redirect) exists yet, so use the resolved segment URL directly.
    uri: sidxRange.resolvedUri,
    byterange: sidxRange.byterange,
    // the segment's playlist
    playlist: playlist
  };

  var sidxRequestOptions = videojs.mergeOptions(sidxInfo, {
    responseType: 'arraybuffer',
    headers: segmentXhrHeaders(sidxInfo)
  });

  return xhr(sidxRequestOptions, finishProcessingFn);
};
9882
/**
 * A playlist loader for DASH manifests. Parses the mpd XML, tracks the
 * currently selected media playlist, refreshes the manifest on
 * minimumUpdatePeriod, and manages sidx (segment index) requests.
 *
 * States observed in this code: 'HAVE_NOTHING' -> 'HAVE_MASTER' ->
 * 'HAVE_METADATA'. Events triggered: loadedplaylist, loadedmetadata,
 * mediachanging, mediachange, playlistunchanged, mediaupdatetimeout,
 * minimumUpdatePeriod, error, dispose.
 */
var DashPlaylistLoader = function (_EventTarget) {
  inherits(DashPlaylistLoader, _EventTarget);

  // DashPlaylistLoader must accept either a src url or a playlist because subsequent
  // playlist loader setups from media groups will expect to be able to pass a playlist
  // (since there aren't external URLs to media playlists with DASH)
  function DashPlaylistLoader(srcUrlOrPlaylist, hls) {
    // optional third argument: { withCredentials, handleManifestRedirects }
    var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
    // optional fourth argument: the master loader, when this is a child loader
    var masterPlaylistLoader = arguments[3];
    classCallCheck(this, DashPlaylistLoader);

    var _this = possibleConstructorReturn(this, (DashPlaylistLoader.__proto__ || Object.getPrototypeOf(DashPlaylistLoader)).call(this));

    // both options default to false when not provided
    var _options$withCredenti = options.withCredentials,
        withCredentials = _options$withCredenti === undefined ? false : _options$withCredenti,
        _options$handleManife = options.handleManifestRedirects,
        handleManifestRedirects = _options$handleManife === undefined ? false : _options$handleManife;


    _this.hls_ = hls;
    _this.withCredentials = withCredentials;
    _this.handleManifestRedirects = handleManifestRedirects;

    if (!srcUrlOrPlaylist) {
      throw new Error('A non-empty playlist URL or playlist is required');
    }

    // event naming?
    // re-request and re-parse the mpd whenever minimumUpdatePeriod elapses
    _this.on('minimumUpdatePeriod', function () {
      _this.refreshXml_();
    });

    // live playlist staleness timeout
    _this.on('mediaupdatetimeout', function () {
      _this.refreshMedia_(_this.media().id);
    });

    _this.state = 'HAVE_NOTHING';
    // cache of playlists that have completed a media() switch, by id
    _this.loadedPlaylists_ = {};

    // initialize the loader state
    // The masterPlaylistLoader will be created with a string
    if (typeof srcUrlOrPlaylist === 'string') {
      _this.srcUrl = srcUrlOrPlaylist;
      // TODO: reset sidxMapping between period changes
      // once multi-period is refactored
      _this.sidxMapping_ = {};
      return possibleConstructorReturn(_this);
    }

    // otherwise this is a child loader created with a playlist object
    _this.setupChildLoader(masterPlaylistLoader, srcUrlOrPlaylist);
    return _this;
  }

  createClass(DashPlaylistLoader, [{
    key: 'setupChildLoader',
    // Associates this (child) loader with its master loader and the playlist
    // it was created for.
    value: function setupChildLoader(masterPlaylistLoader, playlist) {
      this.masterPlaylistLoader_ = masterPlaylistLoader;
      this.childPlaylist_ = playlist;
    }
  }, {
    key: 'dispose',
    // Tears down the loader: aborts any in-flight request, clears timers and
    // caches, and removes all event listeners.
    value: function dispose() {
      this.trigger('dispose');
      this.stopRequest();
      this.loadedPlaylists_ = {};
      window$1.clearTimeout(this.minimumUpdatePeriodTimeout_);
      // mediaRequest_ holds a setTimeout id (see media()/start()), so it is
      // cleared with clearTimeout here
      window$1.clearTimeout(this.mediaRequest_);
      window$1.clearTimeout(this.mediaUpdateTimeout);

      this.off();
    }
  }, {
    key: 'hasPendingRequest',
    // True while either an xhr (request) or a deferred media setup
    // (mediaRequest_) is outstanding.
    value: function hasPendingRequest() {
      return this.request || this.mediaRequest_;
    }
  }, {
    key: 'stopRequest',
    // Aborts the in-flight xhr, detaching its handler first so the abort does
    // not re-enter loader logic.
    value: function stopRequest() {
      if (this.request) {
        var oldRequest = this.request;

        this.request = null;
        oldRequest.onreadystatechange = null;
        oldRequest.abort();
      }
    }
  }, {
    key: 'sidxRequestFinished_',
    // Returns the xhr completion callback for a sidx request. On error it
    // restores startingState, triggers 'error' and calls doneFn(master, null);
    // on success it parses the sidx box (skipping the 8-byte box header) and
    // calls doneFn(master, sidx).
    value: function sidxRequestFinished_(playlist, master, startingState, doneFn) {
      var _this2 = this;

      return function (err, request) {
        // disposed
        if (!_this2.request) {
          return;
        }

        // pending request is cleared
        _this2.request = null;

        if (err) {
          _this2.error = {
            status: request.status,
            message: 'DASH playlist request error at URL: ' + playlist.uri,
            response: request.response,
            // MEDIA_ERR_NETWORK
            code: 2
          };
          if (startingState) {
            _this2.state = startingState;
          }

          _this2.trigger('error');
          return doneFn(master, null);
        }

        var bytes = new Uint8Array(request.response);
        // subarray(8) skips the sidx box size+type header before parsing
        var sidx = mp4Inspector.parseSidx(bytes.subarray(8));

        return doneFn(master, sidx);
      };
    }
  }, {
    key: 'media',
    // Getter/setter for the active media playlist. As a setter, accepts a
    // playlist object or a playlist URI string; may kick off a sidx request
    // before completing the switch via haveMetadata().
    value: function media(playlist) {
      var _this3 = this;

      // getter
      if (!playlist) {
        return this.media_;
      }

      // setter
      if (this.state === 'HAVE_NOTHING') {
        throw new Error('Cannot switch media playlist from ' + this.state);
      }

      var startingState = this.state;

      // find the playlist object if the target playlist has been specified by URI
      if (typeof playlist === 'string') {
        if (!this.master.playlists[playlist]) {
          throw new Error('Unknown playlist URI: ' + playlist);
        }
        playlist = this.master.playlists[playlist];
      }

      var mediaChange = !this.media_ || playlist.id !== this.media_.id;

      // switch to previously loaded playlists immediately
      if (mediaChange && this.loadedPlaylists_[playlist.id] && this.loadedPlaylists_[playlist.id].endList) {
        this.state = 'HAVE_METADATA';
        this.media_ = playlist;

        // trigger media change if the active media has been updated
        if (mediaChange) {
          this.trigger('mediachanging');
          this.trigger('mediachange');
        }
        return;
      }

      // switching to the active playlist is a no-op
      if (!mediaChange) {
        return;
      }

      // switching from an already loaded playlist
      if (this.media_) {
        this.trigger('mediachanging');
      }

      if (!playlist.sidx) {
        // Continue asynchronously if there is no sidx
        // wait one tick to allow haveMaster to run first on a child loader
        this.mediaRequest_ = window$1.setTimeout(this.haveMetadata.bind(this, { startingState: startingState, playlist: playlist }), 0);

        // exit early and don't do sidx work
        return;
      }

      // we have sidx mappings
      var oldMaster = void 0;
      var sidxMapping = void 0;

      // sidxMapping is used when parsing the masterXml, so store
      // it on the masterPlaylistLoader
      if (this.masterPlaylistLoader_) {
        oldMaster = this.masterPlaylistLoader_.master;
        sidxMapping = this.masterPlaylistLoader_.sidxMapping_;
      } else {
        oldMaster = this.master;
        sidxMapping = this.sidxMapping_;
      }

      var sidxKey = generateSidxKey(playlist.sidx);

      // register the pending sidx before the request; the parsed box is
      // attached as `.sidx` in the callback below
      sidxMapping[sidxKey] = {
        sidxInfo: playlist.sidx
      };

      this.request = requestSidx_(playlist.sidx, playlist, this.hls_.xhr, { handleManifestRedirects: this.handleManifestRedirects }, this.sidxRequestFinished_(playlist, oldMaster, startingState, function (newMaster, sidx) {
        if (!newMaster || !sidx) {
          throw new Error('failed to request sidx');
        }

        // update loader's sidxMapping with parsed sidx box
        sidxMapping[sidxKey].sidx = sidx;

        // everything is ready just continue to haveMetadata
        _this3.haveMetadata({
          startingState: startingState,
          playlist: newMaster.playlists[playlist.id]
        });
      }));
    }
  }, {
    key: 'haveMetadata',
    // Completes a media switch: records the playlist, refreshes it (which
    // triggers 'loadedplaylist'), and fires 'loadedmetadata' on the first
    // switch from HAVE_MASTER or 'mediachange' on later switches.
    value: function haveMetadata(_ref) {
      var startingState = _ref.startingState,
          playlist = _ref.playlist;

      this.state = 'HAVE_METADATA';
      this.loadedPlaylists_[playlist.id] = playlist;
      this.mediaRequest_ = null;

      // This will trigger loadedplaylist
      this.refreshMedia_(playlist.id);

      // fire loadedmetadata the first time a media playlist is loaded
      // to resolve setup of media groups
      if (startingState === 'HAVE_MASTER') {
        this.trigger('loadedmetadata');
      } else {
        // trigger media change if the active media has been updated
        this.trigger('mediachange');
      }
    }
  }, {
    key: 'pause',
    // Halts requests and refresh timers without discarding loaded state
    // (unless nothing was ever loaded, in which case the loader is reset to
    // an unstarted state).
    value: function pause() {
      this.stopRequest();
      window$1.clearTimeout(this.mediaUpdateTimeout);
      window$1.clearTimeout(this.minimumUpdatePeriodTimeout_);
      if (this.state === 'HAVE_NOTHING') {
        // If we pause the loader before any data has been retrieved, its as if we never
        // started, so reset to an unstarted state.
        this.started = false;
      }
    }
  }, {
    key: 'load',
    // Resumes loading. With isFinalRendition, only schedules a delayed retry
    // (half a target duration, or 5s when no media is selected yet).
    value: function load(isFinalRendition) {
      var _this4 = this;

      window$1.clearTimeout(this.mediaUpdateTimeout);
      window$1.clearTimeout(this.minimumUpdatePeriodTimeout_);

      var media = this.media();

      if (isFinalRendition) {
        var delay = media ? media.targetDuration / 2 * 1000 : 5 * 1000;

        this.mediaUpdateTimeout = window$1.setTimeout(function () {
          return _this4.load();
        }, delay);
        return;
      }

      // because the playlists are internal to the manifest, load should either load the
      // main manifest, or do nothing but trigger an event
      if (!this.started) {
        this.start();
        return;
      }

      if (media && !media.endList) {
        // live: schedule a media refresh
        this.trigger('mediaupdatetimeout');
      } else {
        this.trigger('loadedplaylist');
      }
    }

    /**
     * Parses the master xml string and updates playlist uri references
     *
     * @return {Object}
     *         The parsed mpd manifest object
     */

  }, {
    key: 'parseMasterXml',
    value: function parseMasterXml() {
      var master = parse(this.masterXml_, {
        manifestUri: this.srcUrl,
        clientOffset: this.clientOffset_,
        sidxMapping: this.sidxMapping_
      });

      master.uri = this.srcUrl;

      // Set up phony URIs for the playlists since we won't have external URIs for DASH
      // but reference playlists by their URI throughout the project
      // TODO: Should we create the dummy uris in mpd-parser as well (leaning towards yes).
      for (var i = 0; i < master.playlists.length; i++) {
        var phonyUri = 'placeholder-uri-' + i;

        master.playlists[i].uri = phonyUri;
      }

      // set up phony URIs for the media group playlists since we won't have external
      // URIs for DASH but reference playlists by their URI throughout the project
      forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
        if (properties.playlists && properties.playlists.length) {
          var _phonyUri = 'placeholder-uri-' + mediaType + '-' + groupKey + '-' + labelKey;
          var id = createPlaylistID(0, _phonyUri);

          properties.playlists[0].uri = _phonyUri;
          properties.playlists[0].id = id;
          // setup ID and URI references (URI for backwards compatibility)
          master.playlists[id] = properties.playlists[0];
          master.playlists[_phonyUri] = properties.playlists[0];
        }
      });

      setupMediaPlaylists(master);
      resolveMediaGroupUris(master);

      return master;
    }
  }, {
    key: 'start',
    // Starts the loader: child loaders defer to haveMaster_ on the next tick;
    // the master loader requests the mpd, records its load time (preferring
    // the response Date header), then synchronizes the client/server clock.
    value: function start() {
      var _this5 = this;

      this.started = true;

      // We don't need to request the master manifest again
      // Call this asynchronously to match the xhr request behavior below
      if (this.masterPlaylistLoader_) {
        this.mediaRequest_ = window$1.setTimeout(this.haveMaster_.bind(this), 0);
        return;
      }

      // request the specified URL
      this.request = this.hls_.xhr({
        uri: this.srcUrl,
        withCredentials: this.withCredentials
      }, function (error, req) {
        // disposed
        if (!_this5.request) {
          return;
        }

        // clear the loader's request reference
        _this5.request = null;

        if (error) {
          _this5.error = {
            status: req.status,
            message: 'DASH playlist request error at URL: ' + _this5.srcUrl,
            responseText: req.responseText,
            // MEDIA_ERR_NETWORK
            code: 2
          };
          if (_this5.state === 'HAVE_NOTHING') {
            _this5.started = false;
          }
          return _this5.trigger('error');
        }

        _this5.masterXml_ = req.responseText;

        // prefer the server's Date header as the manifest load time,
        // falling back to the local clock
        if (req.responseHeaders && req.responseHeaders.date) {
          _this5.masterLoaded_ = Date.parse(req.responseHeaders.date);
        } else {
          _this5.masterLoaded_ = Date.now();
        }

        _this5.srcUrl = resolveManifestRedirect(_this5.handleManifestRedirects, _this5.srcUrl, req);

        _this5.syncClientServerClock_(_this5.onClientServerClockSync_.bind(_this5));
      });
    }

    /**
     * Parses the master xml for UTCTiming node to sync the client clock to the server
     * clock. If the UTCTiming node requires a HEAD or GET request, that request is made.
     *
     * @param {Function} done
     *        Function to call when clock sync has completed
     */

  }, {
    key: 'syncClientServerClock_',
    value: function syncClientServerClock_(done) {
      var _this6 = this;

      var utcTiming = parseUTCTiming(this.masterXml_);

      // No UTCTiming element found in the mpd. Use Date header from mpd request as the
      // server clock
      if (utcTiming === null) {
        this.clientOffset_ = this.masterLoaded_ - Date.now();
        return done();
      }

      if (utcTiming.method === 'DIRECT') {
        this.clientOffset_ = utcTiming.value - Date.now();
        return done();
      }

      // HEAD or GET: request the timing URL (resolved against the mpd url)
      this.request = this.hls_.xhr({
        uri: resolveUrl(this.srcUrl, utcTiming.value),
        method: utcTiming.method,
        withCredentials: this.withCredentials
      }, function (error, req) {
        // disposed
        if (!_this6.request) {
          return;
        }

        // NOTE(review): _this6.request is not nulled in this callback,
        // unlike the other xhr callbacks in this class — confirm whether a
        // later stopRequest()/dispose() is relied upon to clear it.

        if (error) {
          // sync request failed, fall back to using date header from mpd
          // TODO: log warning
          _this6.clientOffset_ = _this6.masterLoaded_ - Date.now();
          return done();
        }

        var serverTime = void 0;

        if (utcTiming.method === 'HEAD') {
          if (!req.responseHeaders || !req.responseHeaders.date) {
            // expected date header not preset, fall back to using date header from mpd
            // TODO: log warning
            serverTime = _this6.masterLoaded_;
          } else {
            serverTime = Date.parse(req.responseHeaders.date);
          }
        } else {
          serverTime = Date.parse(req.responseText);
        }

        _this6.clientOffset_ = serverTime - Date.now();

        done();
      });
    }
  }, {
    key: 'haveMaster_',
    // Transitions to HAVE_MASTER. The master loader parses the mpd and fires
    // 'loadedplaylist'; a child loader instead selects the playlist it was
    // created with (if none is active yet).
    value: function haveMaster_() {
      this.state = 'HAVE_MASTER';
      // clear media request
      this.mediaRequest_ = null;

      if (!this.masterPlaylistLoader_) {
        this.master = this.parseMasterXml();
        // We have the master playlist at this point, so
        // trigger this to allow MasterPlaylistController
        // to make an initial playlist selection
        this.trigger('loadedplaylist');
      } else if (!this.media_) {
        // no media playlist was specifically selected so select
        // the one the child playlist loader was created with
        this.media(this.childPlaylist_);
      }
    }

    /**
     * Handler for after client/server clock synchronization has happened. Sets up
     * xml refresh timer if specificed by the manifest.
     */

  }, {
    key: 'onClientServerClockSync_',
    value: function onClientServerClockSync_() {
      var _this7 = this;

      this.haveMaster_();

      // auto-select the first playlist if nothing is pending or selected
      if (!this.hasPendingRequest() && !this.media_) {
        this.media(this.master.playlists[0]);
      }

      // TODO: minimumUpdatePeriod can have a value of 0. Currently the manifest will not
      // be refreshed when this is the case. The inter-op guide says that when the
      // minimumUpdatePeriod is 0, the manifest should outline all currently available
      // segments, but future segments may require an update. I think a good solution
      // would be to update the manifest at the same rate that the media playlists
      // are "refreshed", i.e. every targetDuration.
      if (this.master && this.master.minimumUpdatePeriod) {
        this.minimumUpdatePeriodTimeout_ = window$1.setTimeout(function () {
          _this7.trigger('minimumUpdatePeriod');
        }, this.master.minimumUpdatePeriod);
      }
    }

    /**
     * Sends request to refresh the master xml and updates the parsed master manifest
     * TODO: Does the client offset need to be recalculated when the xml is refreshed?
     */

  }, {
    key: 'refreshXml_',
    value: function refreshXml_() {
      var _this8 = this;

      // The srcUrl here *may* need to pass through handleManifestsRedirects when
      // sidx is implemented
      this.request = this.hls_.xhr({
        uri: this.srcUrl,
        withCredentials: this.withCredentials
      }, function (error, req) {
        // disposed
        if (!_this8.request) {
          return;
        }

        // clear the loader's request reference
        _this8.request = null;

        if (error) {
          _this8.error = {
            status: req.status,
            message: 'DASH playlist request error at URL: ' + _this8.srcUrl,
            responseText: req.responseText,
            // MEDIA_ERR_NETWORK
            code: 2
          };
          if (_this8.state === 'HAVE_NOTHING') {
            _this8.started = false;
          }
          return _this8.trigger('error');
        }

        _this8.masterXml_ = req.responseText;

        // This will filter out updated sidx info from the mapping
        _this8.sidxMapping_ = filterChangedSidxMappings(_this8.masterXml_, _this8.srcUrl, _this8.clientOffset_, _this8.sidxMapping_);

        var master = _this8.parseMasterXml();
        var updatedMaster = updateMaster$1(_this8.master, master);
        var currentSidxInfo = _this8.media().sidx;

        if (updatedMaster) {
          if (currentSidxInfo) {
            var sidxKey = generateSidxKey(currentSidxInfo);

            // the sidx was updated, so the previous mapping was removed
            if (!_this8.sidxMapping_[sidxKey]) {
              var playlist = _this8.media();

              // re-request the sidx before adopting the updated manifest;
              // the callback re-arms the minimumUpdatePeriod timer itself
              _this8.request = requestSidx_(playlist.sidx, playlist, _this8.hls_.xhr, { handleManifestRedirects: _this8.handleManifestRedirects }, _this8.sidxRequestFinished_(playlist, master, _this8.state, function (newMaster, sidx) {
                if (!newMaster || !sidx) {
                  throw new Error('failed to request sidx on minimumUpdatePeriod');
                }

                // update loader's sidxMapping with parsed sidx box
                _this8.sidxMapping_[sidxKey].sidx = sidx;

                _this8.minimumUpdatePeriodTimeout_ = window$1.setTimeout(function () {
                  _this8.trigger('minimumUpdatePeriod');
                }, _this8.master.minimumUpdatePeriod);

                // TODO: do we need to reload the current playlist?
                _this8.refreshMedia_(_this8.media().id);

                return;
              }));
            }
            // NOTE(review): when the current sidx is still present in the
            // mapping, updatedMaster is NOT assigned to _this8.master —
            // confirm whether that is intentional.
          } else {

            _this8.master = updatedMaster;
          }
        }

        // always re-arm the minimumUpdatePeriod refresh timer
        _this8.minimumUpdatePeriodTimeout_ = window$1.setTimeout(function () {
          _this8.trigger('minimumUpdatePeriod');
        }, _this8.master.minimumUpdatePeriod);
      });
    }

    /**
     * Refreshes the media playlist by re-parsing the master xml and updating playlist
     * references. If this is an alternate loader, the updated parsed manifest is retrieved
     * from the master loader.
     */

  }, {
    key: 'refreshMedia_',
    value: function refreshMedia_(mediaID) {
      var _this9 = this;

      if (!mediaID) {
        throw new Error('refreshMedia_ must take a media id');
      }

      var oldMaster = void 0;
      var newMaster = void 0;

      // a child loader re-parses via (and updates) its master loader
      if (this.masterPlaylistLoader_) {
        oldMaster = this.masterPlaylistLoader_.master;
        newMaster = this.masterPlaylistLoader_.parseMasterXml();
      } else {
        oldMaster = this.master;
        newMaster = this.parseMasterXml();
      }

      var updatedMaster = updateMaster$1(oldMaster, newMaster);

      if (updatedMaster) {
        if (this.masterPlaylistLoader_) {
          this.masterPlaylistLoader_.master = updatedMaster;
        } else {
          this.master = updatedMaster;
        }
        this.media_ = updatedMaster.playlists[mediaID];
      } else {
        this.media_ = newMaster.playlists[mediaID];
        this.trigger('playlistunchanged');
      }

      // live playlists schedule the next refresh
      if (!this.media().endList) {
        this.mediaUpdateTimeout = window$1.setTimeout(function () {
          _this9.trigger('mediaupdatetimeout');
        }, refreshDelay(this.media(), !!updatedMaster));
      }

      this.trigger('loadedplaylist');
    }
  }]);
  return DashPlaylistLoader;
}(EventTarget$1);
10518
/**
 * Returns a debug logging function prefixed with 'VHS:' and the given source
 * name, or a no-op when videojs debug logging is unavailable.
 *
 * @param {String} source name prepended to every log line
 * @return {Function} the logging function
 */
var logger = function logger(source) {
  if (!videojs.log.debug) {
    return function () {};
  }

  return videojs.log.debug.bind(videojs, 'VHS:', source + ' >');
};
10526
// shared no-operation function, used as a default callback
function noop() {}
10528
10529/**
10530 * @file source-updater.js
10531 */
10532
10533/**
10534 * A queue of callbacks to be serialized and applied when a
10535 * MediaSource and its associated SourceBuffers are not in the
10536 * updating state. It is used by the segment loader to update the
10537 * underlying SourceBuffers when new data is loaded, for instance.
10538 *
10539 * @class SourceUpdater
10540 * @param {MediaSource} mediaSource the MediaSource to create the
10541 * SourceBuffer from
10542 * @param {String} mimeType the desired MIME type of the underlying
10543 * SourceBuffer
10544 * @param {Object} sourceBufferEmitter an event emitter that fires when a source buffer is
10545 * added to the media source
10546 */
10547
10548var SourceUpdater = function () {
10549 function SourceUpdater(mediaSource, mimeType, type, sourceBufferEmitter) {
10550 classCallCheck(this, SourceUpdater);
10551
10552 this.callbacks_ = [];
10553 this.pendingCallback_ = null;
10554 this.timestampOffset_ = 0;
10555 this.mediaSource = mediaSource;
10556 this.processedAppend_ = false;
10557 this.type_ = type;
10558 this.mimeType_ = mimeType;
10559 this.logger_ = logger('SourceUpdater[' + type + '][' + mimeType + ']');
10560
10561 if (mediaSource.readyState === 'closed') {
10562 mediaSource.addEventListener('sourceopen', this.createSourceBuffer_.bind(this, mimeType, sourceBufferEmitter));
10563 } else {
10564 this.createSourceBuffer_(mimeType, sourceBufferEmitter);
10565 }
10566 }
10567
10568 createClass(SourceUpdater, [{
10569 key: 'createSourceBuffer_',
10570 value: function createSourceBuffer_(mimeType, sourceBufferEmitter) {
10571 var _this = this;
10572
10573 this.sourceBuffer_ = this.mediaSource.addSourceBuffer(mimeType);
10574
10575 this.logger_('created SourceBuffer');
10576
10577 if (sourceBufferEmitter) {
10578 sourceBufferEmitter.trigger('sourcebufferadded');
10579
10580 if (this.mediaSource.sourceBuffers.length < 2) {
10581 // There's another source buffer we must wait for before we can start updating
10582 // our own (or else we can get into a bad state, i.e., appending video/audio data
10583 // before the other video/audio source buffer is available and leading to a video
10584 // or audio only buffer).
10585 sourceBufferEmitter.on('sourcebufferadded', function () {
10586 _this.start_();
10587 });
10588 return;
10589 }
10590 }
10591
10592 this.start_();
10593 }
10594 }, {
10595 key: 'start_',
10596 value: function start_() {
10597 var _this2 = this;
10598
10599 this.started_ = true;
10600
10601 // run completion handlers and process callbacks as updateend
10602 // events fire
10603 this.onUpdateendCallback_ = function () {
10604 var pendingCallback = _this2.pendingCallback_;
10605
10606 _this2.pendingCallback_ = null;
10607 _this2.sourceBuffer_.removing = false;
10608
10609 _this2.logger_('buffered [' + printableRange(_this2.buffered()) + ']');
10610
10611 if (pendingCallback) {
10612 pendingCallback();
10613 }
10614
10615 _this2.runCallback_();
10616 };
10617
10618 this.sourceBuffer_.addEventListener('updateend', this.onUpdateendCallback_);
10619
10620 this.runCallback_();
10621 }
10622
10623 /**
10624 * Aborts the current segment and resets the segment parser.
10625 *
10626 * @param {Function} done function to call when done
10627 * @see http://w3c.github.io/media-source/#widl-SourceBuffer-abort-void
10628 */
10629
10630 }, {
10631 key: 'abort',
10632 value: function abort(done) {
10633 var _this3 = this;
10634
10635 if (this.processedAppend_) {
10636 this.queueCallback_(function () {
10637 _this3.sourceBuffer_.abort();
10638 }, done);
10639 }
10640 }
10641
10642 /**
10643 * Queue an update to append an ArrayBuffer.
10644 *
10645 * @param {ArrayBuffer} bytes
10646 * @param {Function} done the function to call when done
10647 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-appendBuffer-void-ArrayBuffer-data
10648 */
10649
10650 }, {
10651 key: 'appendBuffer',
10652 value: function appendBuffer(config, done) {
10653 var _this4 = this;
10654
10655 this.processedAppend_ = true;
10656 this.queueCallback_(function () {
10657 if (config.videoSegmentTimingInfoCallback) {
10658 _this4.sourceBuffer_.addEventListener('videoSegmentTimingInfo', config.videoSegmentTimingInfoCallback);
10659 }
10660 _this4.sourceBuffer_.appendBuffer(config.bytes);
10661 }, function () {
10662 if (config.videoSegmentTimingInfoCallback) {
10663 _this4.sourceBuffer_.removeEventListener('videoSegmentTimingInfo', config.videoSegmentTimingInfoCallback);
10664 }
10665 done();
10666 });
10667 }
10668
10669 /**
10670 * Indicates what TimeRanges are buffered in the managed SourceBuffer.
10671 *
10672 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-buffered
10673 */
10674
10675 }, {
10676 key: 'buffered',
10677 value: function buffered() {
10678 if (!this.sourceBuffer_) {
10679 return videojs.createTimeRanges();
10680 }
10681 return this.sourceBuffer_.buffered;
10682 }
10683
10684 /**
10685 * Queue an update to remove a time range from the buffer.
10686 *
10687 * @param {Number} start where to start the removal
10688 * @param {Number} end where to end the removal
10689 * @param {Function} [done=noop] optional callback to be executed when the remove
10690 * operation is complete
10691 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
10692 */
10693
}, {
  key: 'remove',
  value: function remove(start, end) {
    var _this5 = this;

    var done = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : noop;

    // Only queue a remove after at least one append has been processed;
    // otherwise the remove is silently skipped (and `done` is never called —
    // callers rely on this behavior).
    if (this.processedAppend_) {
      this.queueCallback_(function () {
        _this5.logger_('remove [' + start + ' => ' + end + ']');
        // `removing` is a custom flag (not part of the SourceBuffer spec)
        // read by dispose() to wait for an in-flight remove to finish.
        _this5.sourceBuffer_.removing = true;
        _this5.sourceBuffer_.remove(start, end);
      }, done);
    }
  }
10709
10710 /**
10711 * Whether the underlying sourceBuffer is updating or not
10712 *
10713 * @return {Boolean} the updating status of the SourceBuffer
10714 */
10715
10716 }, {
10717 key: 'updating',
10718 value: function updating() {
10719 // we are updating if the sourcebuffer is updating or
10720 return !this.sourceBuffer_ || this.sourceBuffer_.updating ||
10721 // if we have a pending callback that is not our internal noop
10722 !!this.pendingCallback_ && this.pendingCallback_ !== noop;
10723 }
10724
10725 /**
10726 * Set/get the timestampoffset on the SourceBuffer
10727 *
10728 * @return {Number} the timestamp offset
10729 */
10730
}, {
  key: 'timestampOffset',
  value: function timestampOffset(offset) {
    var _this6 = this;

    if (typeof offset !== 'undefined') {
      // Apply the offset through the callback queue so it is ordered with
      // respect to pending appends/removes, then immediately pump the queue.
      this.queueCallback_(function () {
        _this6.sourceBuffer_.timestampOffset = offset;
        _this6.runCallback_();
      });
      // Cache the value so the getter reflects the new offset even before
      // the queued callback has run.
      this.timestampOffset_ = offset;
    }
    return this.timestampOffset_;
  }
10745
10746 /**
10747 * Queue a callback to run
10748 */
10749
10750 }, {
10751 key: 'queueCallback_',
10752 value: function queueCallback_(callback, done) {
10753 this.callbacks_.push([callback.bind(this), done]);
10754 this.runCallback_();
10755 }
10756
10757 /**
10758 * Run a queued callback
10759 */
10760
}, {
  key: 'runCallback_',
  value: function runCallback_() {
    var callbacks = void 0;

    // Only dequeue when the SourceBuffer is idle, work is queued, and the
    // updater has started.
    if (!this.updating() && this.callbacks_.length && this.started_) {
      callbacks = this.callbacks_.shift();
      // Record the entry's done-callback first: updating() treats a non-noop
      // pendingCallback_ as "busy", which prevents re-entrant dequeues while
      // the work function below triggers SourceBuffer activity.
      this.pendingCallback_ = callbacks[1];
      callbacks[0]();
    }
  }
10772
10773 /**
10774 * dispose of the source updater and the underlying sourceBuffer
10775 */
10776
}, {
  key: 'dispose',
  value: function dispose() {
    var _this7 = this;

    var disposeFn = function disposeFn() {
      // abort() is only legal while the MediaSource is open
      if (_this7.sourceBuffer_ && _this7.mediaSource.readyState === 'open') {
        _this7.sourceBuffer_.abort();
      }
      // NOTE(review): this dereferences sourceBuffer_ unconditionally, so
      // dispose() assumes the SourceBuffer exists — consistent with the
      // unconditional removeEventListener below.
      _this7.sourceBuffer_.removeEventListener('updateend', disposeFn);
    };

    this.sourceBuffer_.removeEventListener('updateend', this.onUpdateendCallback_);
    // If a remove() is still in flight (see the custom `removing` flag set in
    // `remove` above), defer the abort until the current update finishes;
    // otherwise abort immediately.
    if (this.sourceBuffer_.removing) {
      this.sourceBuffer_.addEventListener('updateend', disposeFn);
    } else {
      disposeFn();
    }
  }
10796 }]);
10797 return SourceUpdater;
10798}();
10799
// Tunables for buffering and adaptive-bitrate decisions. Values are read (and
// in some cases mutated at runtime) by the loaders and selectors below.
var Config = {
  // amount of forward buffer to aim for, in seconds
  GOAL_BUFFER_LENGTH: 30,
  // upper bound on the dynamically-scaled goal buffer length, in seconds
  MAX_GOAL_BUFFER_LENGTH: 60,
  // rate at which the goal buffer length scales
  GOAL_BUFFER_LENGTH_RATE: 1,
  // 0.5 MB/s
  INITIAL_BANDWIDTH: 4194304,
  // A fudge factor to apply to advertised playlist bitrates to account for
  // temporary flucations in client bandwidth
  BANDWIDTH_VARIANCE: 1.2,
  // How much of the buffer must be filled before we consider upswitching
  BUFFER_LOW_WATER_LINE: 0,
  // upper bound on the buffer low water line, in seconds
  MAX_BUFFER_LOW_WATER_LINE: 30,
  // rate at which the buffer low water line scales
  BUFFER_LOW_WATER_LINE_RATE: 1
};
10814
// Error codes attached to the error objects produced by the request handlers
// below. TIMEOUT and ABORTED are negative so callers can distinguish them
// from generic request failures.
var REQUEST_ERRORS = {
  FAILURE: 2,
  TIMEOUT: -101,
  ABORTED: -102
};
10820
10821/**
10822 * Abort all requests
10823 *
10824 * @param {Object} activeXhrs - an object that tracks all XHR requests
10825 */
/**
 * Abort every outstanding request in the given collection.
 *
 * @param {Object} activeXhrs - an object that tracks all XHR requests
 */
var abortAll = function abortAll(activeXhrs) {
  for (var i = 0; i < activeXhrs.length; i++) {
    activeXhrs[i].abort();
  }
};
10831
10832/**
10833 * Gather important bandwidth stats once a request has completed
10834 *
10835 * @param {Object} request - the XHR request from which to gather stats
10836 */
/**
 * Gather important bandwidth stats once a request has completed.
 *
 * @param {Object} request - the XHR request from which to gather stats
 * @return {Object} bandwidth, bytesReceived and roundTripTime (the latter two
 * default to 0 when absent)
 */
var getRequestStats = function getRequestStats(request) {
  var bytesReceived = request.bytesReceived || 0;
  var roundTripTime = request.roundTripTime || 0;

  return {
    bandwidth: request.bandwidth,
    bytesReceived: bytesReceived,
    roundTripTime: roundTripTime
  };
};
10844
10845/**
10846 * If possible gather bandwidth stats as a request is in
10847 * progress
10848 *
10849 * @param {Event} progressEvent - an event object from an XHR's progress event
10850 */
/**
 * If possible gather bandwidth stats as a request is in progress.
 *
 * @param {Event} progressEvent - an event object from an XHR's progress event
 * @return {Object} bandwidth, bytesReceived and roundTripTime so far
 */
var getProgressStats = function getProgressStats(progressEvent) {
  var request = progressEvent.target;
  var roundTripTime = Date.now() - request.requestTime || 0;
  var bytesReceived = progressEvent.loaded;

  return {
    // This can be Infinity when roundTripTime is 0, which is acceptable:
    // progress-based bandwidth is only used to decide whether a request
    // should be aborted early due to insufficient bandwidth.
    bandwidth: Math.floor(bytesReceived / roundTripTime * 8 * 1000),
    bytesReceived: bytesReceived,
    roundTripTime: roundTripTime
  };
};
10868
10869/**
10870 * Handle all error conditions in one place and return an object
10871 * with all the information
10872 *
10873 * @param {Error|null} error - if non-null signals an error occured with the XHR
10874 * @param {Object} request - the XHR request that possibly generated the error
10875 */
/**
 * Handle all error conditions in one place and return an object with all the
 * information, or null when the request succeeded.
 *
 * @param {Error|null} error - if non-null signals an error occured with the XHR
 * @param {Object} request - the XHR request that possibly generated the error
 * @return {Object|null} error descriptor (status, message, code, xhr) or null
 */
var handleErrors = function handleErrors(error, request) {
  // Build the common error shape; only the message prefix and code vary.
  var failure = function failure(prefix, code) {
    return {
      status: request.status,
      message: prefix + request.uri,
      code: code,
      xhr: request
    };
  };

  if (request.timedout) {
    return failure('HLS request timed-out at URL: ', REQUEST_ERRORS.TIMEOUT);
  }

  if (request.aborted) {
    return failure('HLS request aborted at URL: ', REQUEST_ERRORS.ABORTED);
  }

  if (error) {
    return failure('HLS request errored at URL: ', REQUEST_ERRORS.FAILURE);
  }

  return null;
};
10906
10907/**
10908 * Handle responses for key data and convert the key data to the correct format
10909 * for the decryption step later
10910 *
10911 * @param {Object} segment - a simplified copy of the segmentInfo object
10912 * from SegmentLoader
10913 * @param {Function} finishProcessingFn - a callback to execute to continue processing
10914 * this request
10915 */
/**
 * Handle responses for key data and convert the key data to the correct
 * format for the decryption step later.
 *
 * @param {Object} segment - a simplified copy of the segmentInfo object
 * from SegmentLoader
 * @param {Function} finishProcessingFn - a callback to execute to continue
 * processing this request
 */
var handleKeyResponse = function handleKeyResponse(segment, finishProcessingFn) {
  return function (error, request) {
    var response = request.response;
    var requestError = handleErrors(error, request);

    if (requestError) {
      return finishProcessingFn(requestError, segment);
    }

    // An AES-128 key must be exactly 16 bytes; anything else is invalid.
    if (response.byteLength !== 16) {
      return finishProcessingFn({
        status: request.status,
        message: 'Invalid HLS key at URL: ' + request.uri,
        code: REQUEST_ERRORS.FAILURE,
        xhr: request
      }, segment);
    }

    // Repackage the key as four big-endian 32-bit words, the layout the
    // decrypter expects.
    var view = new DataView(response);

    segment.key.bytes = new Uint32Array([view.getUint32(0), view.getUint32(4), view.getUint32(8), view.getUint32(12)]);
    return finishProcessingFn(null, segment);
  };
};
10940
10941/**
10942 * Handle init-segment responses
10943 *
10944 * @param {Object} segment - a simplified copy of the segmentInfo object
10945 * from SegmentLoader
10946 * @param {Function} finishProcessingFn - a callback to execute to continue processing
10947 * this request
10948 */
/**
 * Handle init-segment responses.
 *
 * @param {Object} segment - a simplified copy of the segmentInfo object
 * from SegmentLoader
 * @param {Object} captionParser - optional parser for fmp4 captions
 * @param {Function} finishProcessingFn - a callback to execute to continue
 * processing this request
 */
var handleInitSegmentResponse = function handleInitSegmentResponse(segment, captionParser, finishProcessingFn) {
  return function (error, request) {
    var requestError = handleErrors(error, request);

    if (requestError) {
      return finishProcessingFn(requestError, segment);
    }

    // stop processing if received empty content
    if (request.response.byteLength === 0) {
      return finishProcessingFn({
        status: request.status,
        message: 'Empty HLS segment content at URL: ' + request.uri,
        code: REQUEST_ERRORS.FAILURE,
        xhr: request
      }, segment);
    }

    var initBytes = new Uint8Array(request.response);

    segment.map.bytes = initBytes;

    // Lazily initialize the CaptionParser the first time it is needed
    if (captionParser && !captionParser.isInitialized()) {
      captionParser.init();
    }

    // Record the timescale and video track info from the init segment so
    // captions can be parsed out of the media segments that reference it.
    segment.map.timescales = mp4probe.timescale(initBytes);
    segment.map.videoTrackIds = mp4probe.videoTrackIds(initBytes);

    return finishProcessingFn(null, segment);
  };
};
10981
10982/**
10983 * Response handler for segment-requests being sure to set the correct
10984 * property depending on whether the segment is encryped or not
10985 * Also records and keeps track of stats that are used for ABR purposes
10986 *
10987 * @param {Object} segment - a simplified copy of the segmentInfo object
10988 * from SegmentLoader
10989 * @param {Function} finishProcessingFn - a callback to execute to continue processing
10990 * this request
10991 */
/**
 * Response handler for segment-requests being sure to set the correct
 * property depending on whether the segment is encryped or not.
 * Also records and keeps track of stats that are used for ABR purposes.
 *
 * @param {Object} segment - a simplified copy of the segmentInfo object
 * from SegmentLoader
 * @param {Object} captionParser - optional parser for fmp4 captions
 * @param {Function} finishProcessingFn - a callback to execute to continue
 * processing this request
 */
var handleSegmentResponse = function handleSegmentResponse(segment, captionParser, finishProcessingFn) {
  return function (error, request) {
    var requestError = handleErrors(error, request);

    if (requestError) {
      return finishProcessingFn(requestError, segment);
    }

    // stop processing if received empty content
    if (request.response.byteLength === 0) {
      return finishProcessingFn({
        status: request.status,
        message: 'Empty HLS segment content at URL: ' + request.uri,
        code: REQUEST_ERRORS.FAILURE,
        xhr: request
      }, segment);
    }

    segment.stats = getRequestStats(request);

    // Encrypted payloads are stashed separately so they can be decrypted
    // later; plain payloads are usable as-is.
    if (segment.key) {
      segment.encryptedBytes = new Uint8Array(request.response);
    } else {
      segment.bytes = new Uint8Array(request.response);
    }

    // This is likely an FMP4 and has the init segment.
    // Run through the CaptionParser in case there are captions.
    if (captionParser && segment.map && segment.map.bytes) {
      // Initialize CaptionParser if it hasn't been yet
      if (!captionParser.isInitialized()) {
        captionParser.init();
      }

      var parsed = captionParser.parse(segment.bytes, segment.map.videoTrackIds, segment.map.timescales);

      if (parsed && parsed.captions) {
        segment.captionStreams = parsed.captionStreams;
        segment.fmp4Captions = parsed.captions;
      }
    }

    return finishProcessingFn(null, segment);
  };
};
11039
11040/**
11041 * Decrypt the segment via the decryption web worker
11042 *
11043 * @param {WebWorker} decrypter - a WebWorker interface to AES-128 decryption routines
11044 * @param {Object} segment - a simplified copy of the segmentInfo object
11045 * from SegmentLoader
11046 * @param {Function} doneFn - a callback that is executed after decryption has completed
11047 */
/**
 * Decrypt the segment via the decryption web worker.
 *
 * @param {WebWorker} decrypter - a WebWorker interface to AES-128 decryption routines
 * @param {Object} segment - a simplified copy of the segmentInfo object
 * from SegmentLoader
 * @param {Function} doneFn - a callback that is executed after decryption has completed
 */
var decryptSegment = function decryptSegment(decrypter, segment, doneFn) {
  var decryptionHandler = function decryptionHandler(event) {
    // Responses are matched to this segment by requestId; ignore messages
    // meant for other in-flight decryptions.
    if (event.data.source !== segment.requestId) {
      return;
    }

    decrypter.removeEventListener('message', decryptionHandler);

    var decrypted = event.data.decrypted;

    segment.bytes = new Uint8Array(decrypted.bytes, decrypted.byteOffset, decrypted.byteLength);
    return doneFn(null, segment);
  };

  decrypter.addEventListener('message', decryptionHandler);

  // Copy the key so its underlying buffer can be transferred to the worker;
  // older typed-array implementations may lack slice().
  var keyBytes = segment.key.bytes.slice ? segment.key.bytes.slice() : new Uint32Array(Array.prototype.slice.call(segment.key.bytes));

  // this is an encrypted segment
  // incrementally decrypt the segment
  decrypter.postMessage(createTransferableMessage({
    source: segment.requestId,
    encrypted: segment.encryptedBytes,
    key: keyBytes,
    iv: segment.key.iv
  }), [segment.encryptedBytes.buffer, keyBytes.buffer]);
};
11078
11079/**
11080 * This function waits for all XHRs to finish (with either success or failure)
11081 * before continueing processing via it's callback. The function gathers errors
11082 * from each request into a single errors array so that the error status for
11083 * each request can be examined later.
11084 *
11085 * @param {Object} activeXhrs - an object that tracks all XHR requests
11086 * @param {WebWorker} decrypter - a WebWorker interface to AES-128 decryption routines
11087 * @param {Function} doneFn - a callback that is executed after all resources have been
11088 * downloaded and any decryption completed
11089 */
/**
 * Returns a callback that waits for all XHRs to finish (with either success
 * or failure) before continuing via doneFn. The first error short-circuits
 * the whole group.
 *
 * @param {Object} activeXhrs - an object that tracks all XHR requests
 * @param {WebWorker} decrypter - a WebWorker interface to AES-128 decryption routines
 * @param {Function} doneFn - a callback that is executed after all resources have been
 * downloaded and any decryption completed
 */
var waitForCompletion = function waitForCompletion(activeXhrs, decrypter, doneFn) {
  var completed = 0;
  var didError = false;

  return function (error, segment) {
    // After the first error the group is already finished; drop late results.
    if (didError) {
      return;
    }

    if (error) {
      didError = true;
      // On the first error, abort every outstanding request in the group.
      abortAll(activeXhrs);

      // We cannot wait for the aborted events from those requests: if the
      // network connection is lost, an unsent request will never fire its
      // abort algorithm (see https://xhr.spec.whatwg.org/#the-abort()-method),
      // and the request that triggered the error may also report ready state
      // 0 (unsent). So finish the group immediately on the first seen error.
      return doneFn(error, segment);
    }

    completed += 1;

    if (completed === activeXhrs.length) {
      // Keep track of when *all* of the requests have completed
      segment.endOfAllRequests = Date.now();

      // Encrypted payloads need a decryption pass before the group is done.
      if (segment.encryptedBytes) {
        return decryptSegment(decrypter, segment, doneFn);
      }

      return doneFn(null, segment);
    }
  };
};
11132
11133/**
11134 * Simple progress event callback handler that gathers some stats before
11135 * executing a provided callback with the `segment` object
11136 *
11137 * @param {Object} segment - a simplified copy of the segmentInfo object
11138 * from SegmentLoader
11139 * @param {Function} progressFn - a callback that is executed each time a progress event
11140 * is received
11141 * @param {Event} event - the progress event object from XMLHttpRequest
11142 */
var handleProgress = function handleProgress(segment, progressFn) {
  return function (event) {
    // Merge the latest progress-derived stats (bytesReceived, bandwidth,
    // roundTripTime) into the segment's running stats.
    segment.stats = videojs.mergeOptions(segment.stats, getProgressStats(event));

    // record the time that we receive the first byte of data
    if (!segment.stats.firstBytesReceivedAt && segment.stats.bytesReceived) {
      segment.stats.firstBytesReceivedAt = Date.now();
    }

    return progressFn(event, segment);
  };
};
11155
11156/**
11157 * Load all resources and does any processing necessary for a media-segment
11158 *
11159 * Features:
11160 * decrypts the media-segment if it has a key uri and an iv
11161 * aborts *all* requests if *any* one request fails
11162 *
11163 * The segment object, at minimum, has the following format:
11164 * {
11165 * resolvedUri: String,
11166 * [byterange]: {
11167 * offset: Number,
11168 * length: Number
11169 * },
11170 * [key]: {
11171 * resolvedUri: String
11172 * [byterange]: {
11173 * offset: Number,
11174 * length: Number
11175 * },
11176 * iv: {
11177 * bytes: Uint32Array
11178 * }
11179 * },
11180 * [map]: {
11181 * resolvedUri: String,
11182 * [byterange]: {
11183 * offset: Number,
11184 * length: Number
11185 * },
11186 * [bytes]: Uint8Array
11187 * }
11188 * }
11189 * ...where [name] denotes optional properties
11190 *
11191 * @param {Function} xhr - an instance of the xhr wrapper in xhr.js
11192 * @param {Object} xhrOptions - the base options to provide to all xhr requests
11193 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128
11194 * decryption routines
11195 * @param {Object} segment - a simplified copy of the segmentInfo object
11196 * from SegmentLoader
11197 * @param {Function} progressFn - a callback that receives progress events from the main
11198 * segment's xhr request
11199 * @param {Function} doneFn - a callback that is executed only once all requests have
11200 * succeeded or failed
11201 * @returns {Function} a function that, when invoked, immediately aborts all
11202 * outstanding requests
11203 */
var mediaSegmentRequest = function mediaSegmentRequest(xhr, xhrOptions, decryptionWorker, captionParser, segment, progressFn, doneFn) {
  var activeXhrs = [];
  // Shared completion callback: calls doneFn once every request below has
  // finished, or immediately on the first error (aborting the rest).
  var finishProcessingFn = waitForCompletion(activeXhrs, decryptionWorker, doneFn);

  // optionally, request the decryption key
  if (segment.key && !segment.key.bytes) {
    var keyRequestOptions = videojs.mergeOptions(xhrOptions, {
      uri: segment.key.resolvedUri,
      responseType: 'arraybuffer'
    });
    var keyRequestCallback = handleKeyResponse(segment, finishProcessingFn);
    var keyXhr = xhr(keyRequestOptions, keyRequestCallback);

    activeXhrs.push(keyXhr);
  }

  // optionally, request the associated media init segment
  if (segment.map && !segment.map.bytes) {
    var initSegmentOptions = videojs.mergeOptions(xhrOptions, {
      uri: segment.map.resolvedUri,
      responseType: 'arraybuffer',
      headers: segmentXhrHeaders(segment.map)
    });
    var initSegmentRequestCallback = handleInitSegmentResponse(segment, captionParser, finishProcessingFn);
    var initSegmentXhr = xhr(initSegmentOptions, initSegmentRequestCallback);

    activeXhrs.push(initSegmentXhr);
  }

  // The media segment itself is always requested; byterange headers (if any)
  // come from segmentXhrHeaders.
  var segmentRequestOptions = videojs.mergeOptions(xhrOptions, {
    uri: segment.resolvedUri,
    responseType: 'arraybuffer',
    headers: segmentXhrHeaders(segment)
  });
  var segmentRequestCallback = handleSegmentResponse(segment, captionParser, finishProcessingFn);
  var segmentXhr = xhr(segmentRequestOptions, segmentRequestCallback);

  // Only the main segment request reports download progress.
  segmentXhr.addEventListener('progress', handleProgress(segment, progressFn));
  activeXhrs.push(segmentXhr);

  // Returned aborter lets the caller cancel the whole request group.
  return function () {
    return abortAll(activeXhrs);
  };
};
11248
11249// Utilities
11250
11251/**
11252 * Returns the CSS value for the specified property on an element
11253 * using `getComputedStyle`. Firefox has a long-standing issue where
11254 * getComputedStyle() may return null when running in an iframe with
11255 * `display: none`.
11256 *
11257 * @see https://bugzilla.mozilla.org/show_bug.cgi?id=548397
11258 * @param {HTMLElement} el the htmlelement to work on
11259 * @param {string} the proprety to get the style for
11260 */
/**
 * Returns the CSS value for the specified property on an element using
 * `getComputedStyle`, or '' when the element or its computed style is
 * unavailable.
 *
 * @see https://bugzilla.mozilla.org/show_bug.cgi?id=548397
 * @param {HTMLElement} el the htmlelement to work on
 * @param {string} property the proprety to get the style for
 */
var safeGetComputedStyle = function safeGetComputedStyle(el, property) {
  if (!el) {
    return '';
  }

  // Firefox has a long-standing issue where getComputedStyle() may return
  // null when running in an iframe with `display: none`.
  var style = window$1.getComputedStyle(el);

  if (!style) {
    return '';
  }

  return style[property];
};
11275
11276/**
11277 * Resuable stable sort function
11278 *
11279 * @param {Playlists} array
11280 * @param {Function} sortFn Different comparators
11281 * @function stableSort
11282 */
/**
 * Resuable stable sort function: sorts `array` in place with `sortFn`,
 * breaking ties by the elements' original positions so the (possibly
 * unstable) native sort behaves stably.
 *
 * @param {Playlists} array
 * @param {Function} sortFn Different comparators
 * @function stableSort
 */
var stableSort = function stableSort(array, sortFn) {
  // Snapshot the original ordering before the in-place sort mutates it.
  var original = array.slice();

  array.sort(function (a, b) {
    var result = sortFn(a, b);

    return result === 0 ? original.indexOf(a) - original.indexOf(b) : result;
  });
};
11295
11296/**
11297 * A comparator function to sort two playlist object by bandwidth.
11298 *
11299 * @param {Object} left a media playlist object
11300 * @param {Object} right a media playlist object
11301 * @return {Number} Greater than zero if the bandwidth attribute of
11302 * left is greater than the corresponding attribute of right. Less
11303 * than zero if the bandwidth of right is greater than left and
11304 * exactly zero if the two are equal.
11305 */
/**
 * A comparator function to sort two playlist object by bandwidth.
 *
 * @param {Object} left a media playlist object
 * @param {Object} right a media playlist object
 * @return {Number} Greater than zero if the bandwidth attribute of
 * left is greater than the corresponding attribute of right. Less
 * than zero if the bandwidth of right is greater than left and
 * exactly zero if the two are equal.
 */
var comparePlaylistBandwidth = function comparePlaylistBandwidth(left, right) {
  // Playlists without a (truthy) BANDWIDTH attribute sort to the end.
  var leftBandwidth = left.attributes.BANDWIDTH || window$1.Number.MAX_VALUE;
  var rightBandwidth = right.attributes.BANDWIDTH || window$1.Number.MAX_VALUE;

  return leftBandwidth - rightBandwidth;
};
11321
11322/**
11323 * A comparator function to sort two playlist object by resolution (width).
11324 * @param {Object} left a media playlist object
11325 * @param {Object} right a media playlist object
11326 * @return {Number} Greater than zero if the resolution.width attribute of
11327 * left is greater than the corresponding attribute of right. Less
11328 * than zero if the resolution.width of right is greater than left and
11329 * exactly zero if the two are equal.
11330 */
/**
 * A comparator function to sort two playlist object by resolution (width).
 * @param {Object} left a media playlist object
 * @param {Object} right a media playlist object
 * @return {Number} Greater than zero if the resolution.width attribute of
 * left is greater than the corresponding attribute of right. Less
 * than zero if the resolution.width of right is greater than left and
 * exactly zero if the two are equal.
 */
var comparePlaylistResolution = function comparePlaylistResolution(left, right) {
  var leftAttributes = left.attributes;
  var rightAttributes = right.attributes;
  // Playlists without resolution information sort to the end.
  var leftWidth = (leftAttributes.RESOLUTION && leftAttributes.RESOLUTION.width) || window$1.Number.MAX_VALUE;
  var rightWidth = (rightAttributes.RESOLUTION && rightAttributes.RESOLUTION.width) || window$1.Number.MAX_VALUE;

  // NOTE - Fallback to bandwidth sort as appropriate in cases where multiple
  // renditions have the same media dimensions/ resolution
  if (leftWidth === rightWidth && leftAttributes.BANDWIDTH && rightAttributes.BANDWIDTH) {
    return leftAttributes.BANDWIDTH - rightAttributes.BANDWIDTH;
  }

  return leftWidth - rightWidth;
};
11354
11355/**
11356 * Chooses the appropriate media playlist based on bandwidth and player size
11357 *
11358 * @param {Object} master
11359 * Object representation of the master manifest
11360 * @param {Number} playerBandwidth
11361 * Current calculated bandwidth of the player
11362 * @param {Number} playerWidth
11363 * Current width of the player element (should account for the device pixel ratio)
11364 * @param {Number} playerHeight
11365 * Current height of the player element (should account for the device pixel ratio)
11366 * @param {Boolean} limitRenditionByPlayerDimensions
11367 * True if the player width and height should be used during the selection, false otherwise
11368 * @return {Playlist} the highest bitrate playlist less than the
11369 * currently detected bandwidth, accounting for some amount of
11370 * bandwidth variance
11371 */
var simpleSelector = function simpleSelector(master, playerBandwidth, playerWidth, playerHeight, limitRenditionByPlayerDimensions) {
  // convert the playlists to an intermediary representation to make comparisons easier
  var sortedPlaylistReps = master.playlists.map(function (playlist) {
    var width = void 0;
    var height = void 0;
    var bandwidth = void 0;

    width = playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.width;
    height = playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height;
    bandwidth = playlist.attributes.BANDWIDTH;

    // playlists without an advertised bandwidth sort to the end
    bandwidth = bandwidth || window$1.Number.MAX_VALUE;

    return {
      bandwidth: bandwidth,
      width: width,
      height: height,
      playlist: playlist
    };
  });

  // sort ascending by bandwidth (ties keep their original order)
  stableSort(sortedPlaylistReps, function (left, right) {
    return left.bandwidth - right.bandwidth;
  });

  // filter out any playlists that have been excluded due to
  // incompatible configurations
  sortedPlaylistReps = sortedPlaylistReps.filter(function (rep) {
    return !Playlist.isIncompatible(rep.playlist);
  });

  // filter out any playlists that have been disabled manually through the representations
  // api or blacklisted temporarily due to playback errors.
  var enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
    return Playlist.isEnabled(rep.playlist);
  });

  if (!enabledPlaylistReps.length) {
    // if there are no enabled playlists, then they have all been blacklisted or disabled
    // by the user through the representations api. In this case, ignore blacklisting and
    // fallback to what the user wants by using playlists the user has not disabled.
    enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
      return !Playlist.isDisabled(rep.playlist);
    });
  }

  // filter out any variant that has greater effective bitrate
  // than the current estimated bandwidth
  var bandwidthPlaylistReps = enabledPlaylistReps.filter(function (rep) {
    return rep.bandwidth * Config.BANDWIDTH_VARIANCE < playerBandwidth;
  });

  // may be undefined when the list is empty; the filter below never invokes
  // its callback in that case, so this is safe
  var highestRemainingBandwidthRep = bandwidthPlaylistReps[bandwidthPlaylistReps.length - 1];

  // get all of the renditions with the same (highest) bandwidth
  // and then taking the very first element
  var bandwidthBestRep = bandwidthPlaylistReps.filter(function (rep) {
    return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
  })[0];

  // if we're not going to limit renditions by player size, make an early decision.
  if (limitRenditionByPlayerDimensions === false) {
    var _chosenRep = bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];

    return _chosenRep ? _chosenRep.playlist : null;
  }

  // filter out playlists without resolution information
  var haveResolution = bandwidthPlaylistReps.filter(function (rep) {
    return rep.width && rep.height;
  });

  // sort variants by resolution
  stableSort(haveResolution, function (left, right) {
    return left.width - right.width;
  });

  // if we have the exact resolution as the player use it
  var resolutionBestRepList = haveResolution.filter(function (rep) {
    return rep.width === playerWidth && rep.height === playerHeight;
  });

  highestRemainingBandwidthRep = resolutionBestRepList[resolutionBestRepList.length - 1];
  // ensure that we pick the highest bandwidth variant that have exact resolution
  var resolutionBestRep = resolutionBestRepList.filter(function (rep) {
    return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
  })[0];

  var resolutionPlusOneList = void 0;
  var resolutionPlusOneSmallest = void 0;
  var resolutionPlusOneRep = void 0;

  // find the smallest variant that is larger than the player
  // if there is no match of exact resolution
  if (!resolutionBestRep) {
    resolutionPlusOneList = haveResolution.filter(function (rep) {
      return rep.width > playerWidth || rep.height > playerHeight;
    });

    // find all the variants have the same smallest resolution
    resolutionPlusOneSmallest = resolutionPlusOneList.filter(function (rep) {
      return rep.width === resolutionPlusOneList[0].width && rep.height === resolutionPlusOneList[0].height;
    });

    // ensure that we also pick the highest bandwidth variant that
    // is just-larger-than the video player
    highestRemainingBandwidthRep = resolutionPlusOneSmallest[resolutionPlusOneSmallest.length - 1];
    resolutionPlusOneRep = resolutionPlusOneSmallest.filter(function (rep) {
      return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
    })[0];
  }

  // fallback chain of variants
  var chosenRep = resolutionPlusOneRep || resolutionBestRep || bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];

  return chosenRep ? chosenRep.playlist : null;
};
11489
11490// Playlist Selectors
11491
11492/**
11493 * Chooses the appropriate media playlist based on the most recent
11494 * bandwidth estimate and the player size.
11495 *
11496 * Expects to be called within the context of an instance of HlsHandler
11497 *
11498 * @return {Playlist} the highest bitrate playlist less than the
11499 * currently detected bandwidth, accounting for some amount of
11500 * bandwidth variance
11501 */
/**
 * Chooses the appropriate media playlist based on the most recent
 * bandwidth estimate and the player size.
 *
 * Expects to be called within the context of an instance of HlsHandler
 *
 * @return {Playlist} the highest bitrate playlist less than the
 * currently detected bandwidth, accounting for some amount of
 * bandwidth variance
 */
var lastBandwidthSelector = function lastBandwidthSelector() {
  var pixelRatio = this.useDevicePixelRatio ? window$1.devicePixelRatio || 1 : 1;
  var el = this.tech_.el();
  // scale the rendered player dimensions by the device pixel ratio
  var playerWidth = parseInt(safeGetComputedStyle(el, 'width'), 10) * pixelRatio;
  var playerHeight = parseInt(safeGetComputedStyle(el, 'height'), 10) * pixelRatio;

  return simpleSelector(this.playlists.master, this.systemBandwidth, playerWidth, playerHeight, this.limitRenditionByPlayerDimensions);
};
11507
11508/**
11509 * Chooses the appropriate media playlist based on the potential to rebuffer
11510 *
11511 * @param {Object} settings
11512 * Object of information required to use this selector
11513 * @param {Object} settings.master
11514 * Object representation of the master manifest
11515 * @param {Number} settings.currentTime
11516 * The current time of the player
11517 * @param {Number} settings.bandwidth
11518 * Current measured bandwidth
11519 * @param {Number} settings.duration
11520 * Duration of the media
11521 * @param {Number} settings.segmentDuration
11522 * Segment duration to be used in round trip time calculations
11523 * @param {Number} settings.timeUntilRebuffer
11524 * Time left in seconds until the player has to rebuffer
11525 * @param {Number} settings.currentTimeline
11526 * The current timeline segments are being loaded from
11527 * @param {SyncController} settings.syncController
11528 * SyncController for determining if we have a sync point for a given playlist
11529 * @return {Object|null}
11530 * {Object} return.playlist
11531 * The highest bandwidth playlist with the least amount of rebuffering
11532 * {Number} return.rebufferingImpact
11533 * The amount of time in seconds switching to this playlist will rebuffer. A
11534 * negative value means that switching will cause zero rebuffering.
11535 */
var minRebufferMaxBandwidthSelector = function minRebufferMaxBandwidthSelector(settings) {
  var master = settings.master,
      currentTime = settings.currentTime,
      bandwidth = settings.bandwidth,
      duration$$1 = settings.duration,
      segmentDuration = settings.segmentDuration,
      timeUntilRebuffer = settings.timeUntilRebuffer,
      currentTimeline = settings.currentTimeline,
      syncController = settings.syncController;

  // filter out any playlists that have been excluded due to
  // incompatible configurations

  var compatiblePlaylists = master.playlists.filter(function (playlist) {
    return !Playlist.isIncompatible(playlist);
  });

  // filter out any playlists that have been disabled manually through the representations
  // api or blacklisted temporarily due to playback errors.
  var enabledPlaylists = compatiblePlaylists.filter(Playlist.isEnabled);

  if (!enabledPlaylists.length) {
    // if there are no enabled playlists, then they have all been blacklisted or disabled
    // by the user through the representations api. In this case, ignore blacklisting and
    // fallback to what the user wants by using playlists the user has not disabled.
    enabledPlaylists = compatiblePlaylists.filter(function (playlist) {
      return !Playlist.isDisabled(playlist);
    });
  }

  // only playlists with an advertised BANDWIDTH can have their rebuffering
  // impact estimated
  var bandwidthPlaylists = enabledPlaylists.filter(Playlist.hasAttribute.bind(null, 'BANDWIDTH'));

  // estimate, per playlist, how long switching to it would stall playback
  var rebufferingEstimates = bandwidthPlaylists.map(function (playlist) {
    var syncPoint = syncController.getSyncPoint(playlist, duration$$1, currentTimeline, currentTime);
    // If there is no sync point for this playlist, switching to it will require a
    // sync request first. This will double the request time
    var numRequests = syncPoint ? 1 : 2;
    var requestTimeEstimate = Playlist.estimateSegmentRequestTime(segmentDuration, bandwidth, playlist);
    // negative impact means the segment should arrive before the buffer runs dry
    var rebufferingImpact = requestTimeEstimate * numRequests - timeUntilRebuffer;

    return {
      playlist: playlist,
      rebufferingImpact: rebufferingImpact
    };
  });

  var noRebufferingPlaylists = rebufferingEstimates.filter(function (estimate) {
    return estimate.rebufferingImpact <= 0;
  });

  // Sort by bandwidth DESC
  stableSort(noRebufferingPlaylists, function (a, b) {
    return comparePlaylistBandwidth(b.playlist, a.playlist);
  });

  // prefer the highest-bandwidth playlist that will not rebuffer at all
  if (noRebufferingPlaylists.length) {
    return noRebufferingPlaylists[0];
  }

  // otherwise fall back to the playlist with the least rebuffering impact
  stableSort(rebufferingEstimates, function (a, b) {
    return a.rebufferingImpact - b.rebufferingImpact;
  });

  return rebufferingEstimates[0] || null;
};
11601
11602/**
11603 * Chooses the appropriate media playlist, which in this case is the lowest bitrate
11604 * one with video. If no renditions with video exist, return the lowest audio rendition.
11605 *
11606 * Expects to be called within the context of an instance of HlsHandler
11607 *
11608 * @return {Object|null}
11609 * {Object} return.playlist
11610 * The lowest bitrate playlist that contains a video codec. If no such rendition
11611 * exists pick the lowest audio rendition.
11612 */
11613var lowestBitrateCompatibleVariantSelector = function lowestBitrateCompatibleVariantSelector() {
11614 // filter out any playlists that have been excluded due to
11615 // incompatible configurations or playback errors
11616 var playlists = this.playlists.master.playlists.filter(Playlist.isEnabled);
11617
11618 // Sort ascending by bitrate
11619 stableSort(playlists, function (a, b) {
11620 return comparePlaylistBandwidth(a, b);
11621 });
11622
11623 // Parse and assume that playlists with no video codec have no video
11624 // (this is not necessarily true, although it is generally true).
11625 //
11626 // If an entire manifest has no valid videos everything will get filtered
11627 // out.
11628 var playlistsWithVideo = playlists.filter(function (playlist) {
11629 return parseCodecs(playlist.attributes.CODECS).videoCodec;
11630 });
11631
11632 return playlistsWithVideo[0] || null;
11633};
11634
11635/**
11636 * Create captions text tracks on video.js if they do not exist
11637 *
11638 * @param {Object} inbandTextTracks a reference to current inbandTextTracks
11639 * @param {Object} tech the video.js tech
11640 * @param {Object} captionStreams the caption streams to create
11641 * @private
11642 */
11643var createCaptionsTrackIfNotExists = function createCaptionsTrackIfNotExists(inbandTextTracks, tech, captionStreams) {
11644 for (var trackId in captionStreams) {
11645 if (!inbandTextTracks[trackId]) {
11646 tech.trigger({ type: 'usage', name: 'hls-608' });
11647 var track = tech.textTracks().getTrackById(trackId);
11648
11649 if (track) {
11650 // Resuse an existing track with a CC# id because this was
11651 // very likely created by videojs-contrib-hls from information
11652 // in the m3u8 for us to use
11653 inbandTextTracks[trackId] = track;
11654 } else {
11655 // Otherwise, create a track with the default `CC#` label and
11656 // without a language
11657 inbandTextTracks[trackId] = tech.addRemoteTextTrack({
11658 kind: 'captions',
11659 id: trackId,
11660 label: trackId
11661 }, false).track;
11662 }
11663 }
11664 }
11665};
11666
/**
 * Adds caption cues to their corresponding inband text tracks.
 *
 * @param {Object} _ref
 * @param {Object} _ref.inbandTextTracks map of caption stream names to text tracks
 * @param {Array} _ref.captionArray caption objects ({ stream, startTime, endTime, text })
 * @param {Number} _ref.timestampOffset offset (in seconds) added to each cue's
 *        start/end time to translate from media time to display time
 * @private
 */
var addCaptionData = function addCaptionData(_ref) {
  var inbandTextTracks = _ref.inbandTextTracks,
      captionArray = _ref.captionArray,
      timestampOffset = _ref.timestampOffset;

  if (!captionArray) {
    return;
  }

  // Use window$1 (the `global/window` shim imported at the top of this bundle
  // and used everywhere else in this file) rather than the bare `window`
  // global, so this does not throw in non-browser environments.
  var Cue = window$1.WebKitDataCue || window$1.VTTCue;

  captionArray.forEach(function (caption) {
    var track = caption.stream;
    var startTime = caption.startTime;
    var endTime = caption.endTime;

    // ignore captions for streams that have no registered text track
    if (!inbandTextTracks[track]) {
      return;
    }

    startTime += timestampOffset;
    endTime += timestampOffset;

    inbandTextTracks[track].addCue(new Cue(startTime, endTime, caption.text));
  });
};
11693
11694/**
11695 * @file segment-loader.js
11696 */
11697
11698// in ms
11699var CHECK_BUFFER_DELAY = 500;
11700
11701/**
11702 * Determines if we should call endOfStream on the media source based
11703 * on the state of the buffer or if appened segment was the final
11704 * segment in the playlist.
11705 *
11706 * @param {Object} playlist a media playlist object
11707 * @param {Object} mediaSource the MediaSource object
11708 * @param {Number} segmentIndex the index of segment we last appended
11709 * @returns {Boolean} do we need to call endOfStream on the MediaSource
11710 */
11711var detectEndOfStream = function detectEndOfStream(playlist, mediaSource, segmentIndex) {
11712 if (!playlist || !mediaSource) {
11713 return false;
11714 }
11715
11716 var segments = playlist.segments;
11717
11718 // determine a few boolean values to help make the branch below easier
11719 // to read
11720 var appendedLastSegment = segmentIndex === segments.length;
11721
11722 // if we've buffered to the end of the video, we need to call endOfStream
11723 // so that MediaSources can trigger the `ended` event when it runs out of
11724 // buffered data instead of waiting for me
11725 return playlist.endList && mediaSource.readyState === 'open' && appendedLastSegment;
11726};
11727
/**
 * Reports whether a value is an actual, finite number (rejects NaN,
 * +/-Infinity, and anything that is not a number primitive).
 *
 * @param {*} num the value to test
 * @return {Boolean} true when num is a finite number
 */
var finite = function finite(num) {
  if (typeof num !== 'number') {
    return false;
  }
  return isFinite(num);
};
11731
/**
 * Returns an error message when a new segment's media would require an
 * illegal rendition switch (muxed-to-audio-only or audio-only-to-muxed),
 * or null when the switch is acceptable.
 *
 * @param {String} loaderType the type of loader ('main', 'audio', ...)
 * @param {Object} startingMedia media info the loader started with
 * @param {Object} newSegmentMedia media info found in the new segment
 * @return {String|null} a description of the illegal switch, or null
 */
var illegalMediaSwitch = function illegalMediaSwitch(loaderType, startingMedia, newSegmentMedia) {
  // Although these checks should most likely cover non 'main' types, for now it narrows
  // the scope of our checks.
  if (loaderType !== 'main' || !startingMedia || !newSegmentMedia) {
    return null;
  }

  var hadVideo = startingMedia.containsVideo;
  var hasVideo = newSegmentMedia.containsVideo;

  if (!newSegmentMedia.containsAudio && !hasVideo) {
    return 'Neither audio nor video found in segment.';
  }

  if (hadVideo && !hasVideo) {
    return 'Only audio found in segment when we expected video.' + ' We can\'t switch to audio only from a stream that had video.' + ' To get rid of this message, please add codec information to the manifest.';
  }

  if (!hadVideo && hasVideo) {
    return 'Video found in segment when we expected only audio.' + ' We can\'t switch to a stream with video from an audio only stream.' + ' To get rid of this message, please add codec information to the manifest.';
  }

  return null;
};
11753
11754/**
11755 * Calculates a time value that is safe to remove from the back buffer without interupting
11756 * playback.
11757 *
11758 * @param {TimeRange} seekable
11759 * The current seekable range
11760 * @param {Number} currentTime
11761 * The current time of the player
11762 * @param {Number} targetDuration
11763 * The target duration of the current playlist
11764 * @return {Number}
11765 * Time that is safe to remove from the back buffer without interupting playback
11766 */
11767var safeBackBufferTrimTime = function safeBackBufferTrimTime(seekable$$1, currentTime, targetDuration) {
11768 // 30 seconds before the playhead provides a safe default for trimming.
11769 //
11770 // Choosing a reasonable default is particularly important for high bitrate content and
11771 // VOD videos/live streams with large windows, as the buffer may end up overfilled and
11772 // throw an APPEND_BUFFER_ERR.
11773 var trimTime = currentTime - 30;
11774
11775 if (seekable$$1.length) {
11776 // Some live playlists may have a shorter window of content than the full allowed back
11777 // buffer. For these playlists, don't save content that's no longer within the window.
11778 trimTime = Math.max(trimTime, seekable$$1.start(0));
11779 }
11780
11781 // Don't remove within target duration of the current time to avoid the possibility of
11782 // removing the GOP currently being played, as removing it can cause playback stalls.
11783 var maxTrimTime = currentTime - targetDuration;
11784
11785 return Math.min(maxTrimTime, trimTime);
11786};
11787
/**
 * Builds a one-line, human-readable description of a segment request for
 * debug logging.
 *
 * @param {Object} segmentInfo the segment request being described
 * @return {String} a log-friendly summary of the request
 */
var segmentInfoString = function segmentInfoString(segmentInfo) {
  var start = segmentInfo.segment.start;
  var end = segmentInfo.segment.end;
  var playlist = segmentInfo.playlist;
  var seq = playlist.mediaSequence;
  var id = playlist.id;
  var segments = playlist.segments === undefined ? [] : playlist.segments;
  var index = segmentInfo.mediaIndex;
  var timeline = segmentInfo.timeline;

  var rangeDescription = 'appending [' + index + '] of [' + seq + ', ' + (seq + segments.length) + '] from playlist [' + id + ']';
  var timingDescription = '[' + start + ' => ' + end + '] in timeline [' + timeline + ']';

  return [rangeDescription, timingDescription].join(' ');
};
11803
11804/**
11805 * An object that manages segment loading and appending.
11806 *
11807 * @class SegmentLoader
11808 * @param {Object} options required and optional options
11809 * @extends videojs.EventTarget
11810 */
11811
11812var SegmentLoader = function (_videojs$EventTarget) {
11813 inherits(SegmentLoader, _videojs$EventTarget);
11814
11815 function SegmentLoader(settings) {
11816 classCallCheck(this, SegmentLoader);
11817
11818 // check pre-conditions
11819 var _this = possibleConstructorReturn(this, (SegmentLoader.__proto__ || Object.getPrototypeOf(SegmentLoader)).call(this));
11820
11821 if (!settings) {
11822 throw new TypeError('Initialization settings are required');
11823 }
11824 if (typeof settings.currentTime !== 'function') {
11825 throw new TypeError('No currentTime getter specified');
11826 }
11827 if (!settings.mediaSource) {
11828 throw new TypeError('No MediaSource specified');
11829 }
11830 // public properties
11831 _this.bandwidth = settings.bandwidth;
11832 _this.throughput = { rate: 0, count: 0 };
11833 _this.roundTrip = NaN;
11834 _this.resetStats_();
11835 _this.mediaIndex = null;
11836
11837 // private settings
11838 _this.hasPlayed_ = settings.hasPlayed;
11839 _this.currentTime_ = settings.currentTime;
11840 _this.seekable_ = settings.seekable;
11841 _this.seeking_ = settings.seeking;
11842 _this.duration_ = settings.duration;
11843 _this.mediaSource_ = settings.mediaSource;
11844 _this.hls_ = settings.hls;
11845 _this.loaderType_ = settings.loaderType;
11846 _this.startingMedia_ = void 0;
11847 _this.segmentMetadataTrack_ = settings.segmentMetadataTrack;
11848 _this.goalBufferLength_ = settings.goalBufferLength;
11849 _this.sourceType_ = settings.sourceType;
11850 _this.inbandTextTracks_ = settings.inbandTextTracks;
11851 _this.state_ = 'INIT';
11852
11853 // private instance variables
11854 _this.checkBufferTimeout_ = null;
11855 _this.error_ = void 0;
11856 _this.currentTimeline_ = -1;
11857 _this.pendingSegment_ = null;
11858 _this.mimeType_ = null;
11859 _this.sourceUpdater_ = null;
11860 _this.xhrOptions_ = null;
11861
11862 // Fragmented mp4 playback
11863 _this.activeInitSegmentId_ = null;
11864 _this.initSegments_ = {};
11865
11866 // HLSe playback
11867 _this.cacheEncryptionKeys_ = settings.cacheEncryptionKeys;
11868 _this.keyCache_ = {};
11869
11870 // Fmp4 CaptionParser
11871 if (_this.loaderType_ === 'main') {
11872 _this.captionParser_ = new CaptionParser();
11873 } else {
11874 _this.captionParser_ = null;
11875 }
11876
11877 _this.decrypter_ = settings.decrypter;
11878
11879 // Manages the tracking and generation of sync-points, mappings
11880 // between a time in the display time and a segment index within
11881 // a playlist
11882 _this.syncController_ = settings.syncController;
11883 _this.syncPoint_ = {
11884 segmentIndex: 0,
11885 time: 0
11886 };
11887
11888 _this.triggerSyncInfoUpdate_ = function () {
11889 return _this.trigger('syncinfoupdate');
11890 };
11891 _this.syncController_.on('syncinfoupdate', _this.triggerSyncInfoUpdate_);
11892
11893 _this.mediaSource_.addEventListener('sourceopen', function () {
11894 return _this.ended_ = false;
11895 });
11896
11897 // ...for determining the fetch location
11898 _this.fetchAtBuffer_ = false;
11899
11900 _this.logger_ = logger('SegmentLoader[' + _this.loaderType_ + ']');
11901
11902 Object.defineProperty(_this, 'state', {
11903 get: function get$$1() {
11904 return this.state_;
11905 },
11906 set: function set$$1(newState) {
11907 if (newState !== this.state_) {
11908 this.logger_(this.state_ + ' -> ' + newState);
11909 this.state_ = newState;
11910 }
11911 }
11912 });
11913 return _this;
11914 }
11915
11916 /**
11917 * reset all of our media stats
11918 *
11919 * @private
11920 */
11921
11922
11923 createClass(SegmentLoader, [{
11924 key: 'resetStats_',
11925 value: function resetStats_() {
11926 this.mediaBytesTransferred = 0;
11927 this.mediaRequests = 0;
11928 this.mediaRequestsAborted = 0;
11929 this.mediaRequestsTimedout = 0;
11930 this.mediaRequestsErrored = 0;
11931 this.mediaTransferDuration = 0;
11932 this.mediaSecondsLoaded = 0;
11933 }
11934
11935 /**
11936 * dispose of the SegmentLoader and reset to the default state
11937 */
11938
11939 }, {
11940 key: 'dispose',
11941 value: function dispose() {
11942 this.trigger('dispose');
11943 this.state = 'DISPOSED';
11944 this.pause();
11945 this.abort_();
11946 if (this.sourceUpdater_) {
11947 this.sourceUpdater_.dispose();
11948 }
11949 this.resetStats_();
11950 if (this.captionParser_) {
11951 this.captionParser_.reset();
11952 }
11953
11954 if (this.checkBufferTimeout_) {
11955 window$1.clearTimeout(this.checkBufferTimeout_);
11956 }
11957
11958 if (this.syncController_ && this.triggerSyncInfoUpdate_) {
11959 this.syncController_.off('syncinfoupdate', this.triggerSyncInfoUpdate_);
11960 }
11961
11962 this.off();
11963 }
11964
11965 /**
11966 * abort anything that is currently doing on with the SegmentLoader
11967 * and reset to a default state
11968 */
11969
11970 }, {
11971 key: 'abort',
11972 value: function abort() {
11973 if (this.state !== 'WAITING') {
11974 if (this.pendingSegment_) {
11975 this.pendingSegment_ = null;
11976 }
11977 return;
11978 }
11979
11980 this.abort_();
11981
11982 // We aborted the requests we were waiting on, so reset the loader's state to READY
11983 // since we are no longer "waiting" on any requests. XHR callback is not always run
11984 // when the request is aborted. This will prevent the loader from being stuck in the
11985 // WAITING state indefinitely.
11986 this.state = 'READY';
11987
11988 // don't wait for buffer check timeouts to begin fetching the
11989 // next segment
11990 if (!this.paused()) {
11991 this.monitorBuffer_();
11992 }
11993 }
11994
11995 /**
11996 * abort all pending xhr requests and null any pending segements
11997 *
11998 * @private
11999 */
12000
12001 }, {
12002 key: 'abort_',
12003 value: function abort_() {
12004 if (this.pendingSegment_) {
12005 this.pendingSegment_.abortRequests();
12006 }
12007
12008 // clear out the segment being processed
12009 this.pendingSegment_ = null;
12010 }
12011
12012 /**
12013 * set an error on the segment loader and null out any pending segements
12014 *
12015 * @param {Error} error the error to set on the SegmentLoader
12016 * @return {Error} the error that was set or that is currently set
12017 */
12018
12019 }, {
12020 key: 'error',
12021 value: function error(_error) {
12022 if (typeof _error !== 'undefined') {
12023 this.error_ = _error;
12024 }
12025
12026 this.pendingSegment_ = null;
12027 return this.error_;
12028 }
12029 }, {
12030 key: 'endOfStream',
12031 value: function endOfStream() {
12032 this.ended_ = true;
12033 this.pause();
12034 this.trigger('ended');
12035 }
12036
12037 /**
12038 * Indicates which time ranges are buffered
12039 *
12040 * @return {TimeRange}
12041 * TimeRange object representing the current buffered ranges
12042 */
12043
12044 }, {
12045 key: 'buffered_',
12046 value: function buffered_() {
12047 if (!this.sourceUpdater_) {
12048 return videojs.createTimeRanges();
12049 }
12050
12051 return this.sourceUpdater_.buffered();
12052 }
12053
12054 /**
12055 * Gets and sets init segment for the provided map
12056 *
12057 * @param {Object} map
12058 * The map object representing the init segment to get or set
12059 * @param {Boolean=} set
12060 * If true, the init segment for the provided map should be saved
12061 * @return {Object}
12062 * map object for desired init segment
12063 */
12064
12065 }, {
12066 key: 'initSegment',
12067 value: function initSegment(map) {
12068 var set$$1 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false;
12069
12070 if (!map) {
12071 return null;
12072 }
12073
12074 var id = initSegmentId(map);
12075 var storedMap = this.initSegments_[id];
12076
12077 if (set$$1 && !storedMap && map.bytes) {
12078 this.initSegments_[id] = storedMap = {
12079 resolvedUri: map.resolvedUri,
12080 byterange: map.byterange,
12081 bytes: map.bytes,
12082 timescales: map.timescales,
12083 videoTrackIds: map.videoTrackIds
12084 };
12085 }
12086
12087 return storedMap || map;
12088 }
12089
12090 /**
12091 * Gets and sets key for the provided key
12092 *
12093 * @param {Object} key
12094 * The key object representing the key to get or set
12095 * @param {Boolean=} set
12096 * If true, the key for the provided key should be saved
12097 * @return {Object}
12098 * Key object for desired key
12099 */
12100
12101 }, {
12102 key: 'segmentKey',
12103 value: function segmentKey(key) {
12104 var set$$1 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false;
12105
12106 if (!key) {
12107 return null;
12108 }
12109
12110 var id = segmentKeyId(key);
12111 var storedKey = this.keyCache_[id];
12112
12113 // TODO: We should use the HTTP Expires header to invalidate our cache per
12114 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-6.2.3
12115 if (this.cacheEncryptionKeys_ && set$$1 && !storedKey && key.bytes) {
12116 this.keyCache_[id] = storedKey = {
12117 resolvedUri: key.resolvedUri,
12118 bytes: key.bytes
12119 };
12120 }
12121
12122 var result = {
12123 resolvedUri: (storedKey || key).resolvedUri
12124 };
12125
12126 if (storedKey) {
12127 result.bytes = storedKey.bytes;
12128 }
12129
12130 return result;
12131 }
12132
12133 /**
12134 * Returns true if all configuration required for loading is present, otherwise false.
12135 *
12136 * @return {Boolean} True if the all configuration is ready for loading
12137 * @private
12138 */
12139
12140 }, {
12141 key: 'couldBeginLoading_',
12142 value: function couldBeginLoading_() {
12143 return this.playlist_ && (
12144 // the source updater is created when init_ is called, so either having a
12145 // source updater or being in the INIT state with a mimeType is enough
12146 // to say we have all the needed configuration to start loading.
12147 this.sourceUpdater_ || this.mimeType_ && this.state === 'INIT') && !this.paused();
12148 }
12149
12150 /**
12151 * load a playlist and start to fill the buffer
12152 */
12153
12154 }, {
12155 key: 'load',
12156 value: function load() {
12157 // un-pause
12158 this.monitorBuffer_();
12159
12160 // if we don't have a playlist yet, keep waiting for one to be
12161 // specified
12162 if (!this.playlist_) {
12163 return;
12164 }
12165
12166 // not sure if this is the best place for this
12167 this.syncController_.setDateTimeMapping(this.playlist_);
12168
12169 // if all the configuration is ready, initialize and begin loading
12170 if (this.state === 'INIT' && this.couldBeginLoading_()) {
12171 return this.init_();
12172 }
12173
12174 // if we're in the middle of processing a segment already, don't
12175 // kick off an additional segment request
12176 if (!this.couldBeginLoading_() || this.state !== 'READY' && this.state !== 'INIT') {
12177 return;
12178 }
12179
12180 this.state = 'READY';
12181 }
12182
12183 /**
12184 * Once all the starting parameters have been specified, begin
12185 * operation. This method should only be invoked from the INIT
12186 * state.
12187 *
12188 * @private
12189 */
12190
12191 }, {
12192 key: 'init_',
12193 value: function init_() {
12194 this.state = 'READY';
12195 this.sourceUpdater_ = new SourceUpdater(this.mediaSource_, this.mimeType_, this.loaderType_, this.sourceBufferEmitter_);
12196 this.resetEverything();
12197 return this.monitorBuffer_();
12198 }
12199
12200 /**
12201 * set a playlist on the segment loader
12202 *
12203 * @param {PlaylistLoader} media the playlist to set on the segment loader
12204 */
12205
12206 }, {
12207 key: 'playlist',
12208 value: function playlist(newPlaylist) {
12209 var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
12210
12211 if (!newPlaylist) {
12212 return;
12213 }
12214
12215 var oldPlaylist = this.playlist_;
12216 var segmentInfo = this.pendingSegment_;
12217
12218 this.playlist_ = newPlaylist;
12219 this.xhrOptions_ = options;
12220
12221 // when we haven't started playing yet, the start of a live playlist
12222 // is always our zero-time so force a sync update each time the playlist
12223 // is refreshed from the server
12224 //
12225 // Use the INIT state to determine if playback has started, as the playlist sync info
12226 // should be fixed once requests begin (as sync points are generated based on sync
12227 // info), but not before then.
12228 if (this.state === 'INIT') {
12229 newPlaylist.syncInfo = {
12230 mediaSequence: newPlaylist.mediaSequence,
12231 time: 0
12232 };
12233 }
12234
12235 var oldId = null;
12236
12237 if (oldPlaylist) {
12238 if (oldPlaylist.id) {
12239 oldId = oldPlaylist.id;
12240 } else if (oldPlaylist.uri) {
12241 oldId = oldPlaylist.uri;
12242 }
12243 }
12244
12245 this.logger_('playlist update [' + oldId + ' => ' + (newPlaylist.id || newPlaylist.uri) + ']');
12246
12247 // in VOD, this is always a rendition switch (or we updated our syncInfo above)
12248 // in LIVE, we always want to update with new playlists (including refreshes)
12249 this.trigger('syncinfoupdate');
12250
12251 // if we were unpaused but waiting for a playlist, start
12252 // buffering now
12253 if (this.state === 'INIT' && this.couldBeginLoading_()) {
12254 return this.init_();
12255 }
12256
12257 if (!oldPlaylist || oldPlaylist.uri !== newPlaylist.uri) {
12258 if (this.mediaIndex !== null) {
12259 // we must "resync" the segment loader when we switch renditions and
12260 // the segment loader is already synced to the previous rendition
12261 this.resyncLoader();
12262 }
12263
12264 // the rest of this function depends on `oldPlaylist` being defined
12265 return;
12266 }
12267
12268 // we reloaded the same playlist so we are in a live scenario
12269 // and we will likely need to adjust the mediaIndex
12270 var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;
12271
12272 this.logger_('live window shift [' + mediaSequenceDiff + ']');
12273
12274 // update the mediaIndex on the SegmentLoader
12275 // this is important because we can abort a request and this value must be
12276 // equal to the last appended mediaIndex
12277 if (this.mediaIndex !== null) {
12278 this.mediaIndex -= mediaSequenceDiff;
12279 }
12280
12281 // update the mediaIndex on the SegmentInfo object
12282 // this is important because we will update this.mediaIndex with this value
12283 // in `handleUpdateEnd_` after the segment has been successfully appended
12284 if (segmentInfo) {
12285 segmentInfo.mediaIndex -= mediaSequenceDiff;
12286
12287 // we need to update the referenced segment so that timing information is
12288 // saved for the new playlist's segment, however, if the segment fell off the
12289 // playlist, we can leave the old reference and just lose the timing info
12290 if (segmentInfo.mediaIndex >= 0) {
12291 segmentInfo.segment = newPlaylist.segments[segmentInfo.mediaIndex];
12292 }
12293 }
12294
12295 this.syncController_.saveExpiredSegmentInfo(oldPlaylist, newPlaylist);
12296 }
12297
12298 /**
12299 * Prevent the loader from fetching additional segments. If there
12300 * is a segment request outstanding, it will finish processing
12301 * before the loader halts. A segment loader can be unpaused by
12302 * calling load().
12303 */
12304
12305 }, {
12306 key: 'pause',
12307 value: function pause() {
12308 if (this.checkBufferTimeout_) {
12309 window$1.clearTimeout(this.checkBufferTimeout_);
12310
12311 this.checkBufferTimeout_ = null;
12312 }
12313 }
12314
12315 /**
12316 * Returns whether the segment loader is fetching additional
12317 * segments when given the opportunity. This property can be
12318 * modified through calls to pause() and load().
12319 */
12320
12321 }, {
12322 key: 'paused',
12323 value: function paused() {
12324 return this.checkBufferTimeout_ === null;
12325 }
12326
12327 /**
12328 * create/set the following mimetype on the SourceBuffer through a
12329 * SourceUpdater
12330 *
12331 * @param {String} mimeType the mime type string to use
12332 * @param {Object} sourceBufferEmitter an event emitter that fires when a source buffer
12333 * is added to the media source
12334 */
12335
12336 }, {
12337 key: 'mimeType',
12338 value: function mimeType(_mimeType, sourceBufferEmitter) {
12339 if (this.mimeType_) {
12340 return;
12341 }
12342
12343 this.mimeType_ = _mimeType;
12344 this.sourceBufferEmitter_ = sourceBufferEmitter;
12345 // if we were unpaused but waiting for a sourceUpdater, start
12346 // buffering now
12347 if (this.state === 'INIT' && this.couldBeginLoading_()) {
12348 this.init_();
12349 }
12350 }
12351
12352 /**
12353 * Delete all the buffered data and reset the SegmentLoader
12354 * @param {Function} [done] an optional callback to be executed when the remove
12355 * operation is complete
12356 */
12357
12358 }, {
12359 key: 'resetEverything',
12360 value: function resetEverything(done) {
12361 this.ended_ = false;
12362 this.resetLoader();
12363
12364 // remove from 0, the earliest point, to Infinity, to signify removal of everything.
12365 // VTT Segment Loader doesn't need to do anything but in the regular SegmentLoader,
12366 // we then clamp the value to duration if necessary.
12367 this.remove(0, Infinity, done);
12368
12369 // clears fmp4 captions
12370 if (this.captionParser_) {
12371 this.captionParser_.clearAllCaptions();
12372 }
12373 this.trigger('reseteverything');
12374 }
12375
12376 /**
12377 * Force the SegmentLoader to resync and start loading around the currentTime instead
12378 * of starting at the end of the buffer
12379 *
12380 * Useful for fast quality changes
12381 */
12382
12383 }, {
12384 key: 'resetLoader',
12385 value: function resetLoader() {
12386 this.fetchAtBuffer_ = false;
12387 this.resyncLoader();
12388 }
12389
12390 /**
12391 * Force the SegmentLoader to restart synchronization and make a conservative guess
12392 * before returning to the simple walk-forward method
12393 */
12394
12395 }, {
12396 key: 'resyncLoader',
12397 value: function resyncLoader() {
12398 this.mediaIndex = null;
12399 this.syncPoint_ = null;
12400 this.abort();
12401 }
12402
12403 /**
12404 * Remove any data in the source buffer between start and end times
12405 * @param {Number} start - the start time of the region to remove from the buffer
12406 * @param {Number} end - the end time of the region to remove from the buffer
12407 * @param {Function} [done] - an optional callback to be executed when the remove
12408 * operation is complete
12409 */
12410
12411 }, {
12412 key: 'remove',
12413 value: function remove(start, end, done) {
12414 // clamp end to duration if we need to remove everything.
12415 // This is due to a browser bug that causes issues if we remove to Infinity.
12416 // videojs/videojs-contrib-hls#1225
12417 if (end === Infinity) {
12418 end = this.duration_();
12419 }
12420
12421 if (this.sourceUpdater_) {
12422 this.sourceUpdater_.remove(start, end, done);
12423 }
12424 removeCuesFromTrack(start, end, this.segmentMetadataTrack_);
12425
12426 if (this.inbandTextTracks_) {
12427 for (var id in this.inbandTextTracks_) {
12428 removeCuesFromTrack(start, end, this.inbandTextTracks_[id]);
12429 }
12430 }
12431 }
12432
12433 /**
12434 * (re-)schedule monitorBufferTick_ to run as soon as possible
12435 *
12436 * @private
12437 */
12438
12439 }, {
12440 key: 'monitorBuffer_',
12441 value: function monitorBuffer_() {
12442 if (this.checkBufferTimeout_) {
12443 window$1.clearTimeout(this.checkBufferTimeout_);
12444 }
12445
12446 this.checkBufferTimeout_ = window$1.setTimeout(this.monitorBufferTick_.bind(this), 1);
12447 }
12448
12449 /**
12450 * As long as the SegmentLoader is in the READY state, periodically
12451 * invoke fillBuffer_().
12452 *
12453 * @private
12454 */
12455
12456 }, {
12457 key: 'monitorBufferTick_',
12458 value: function monitorBufferTick_() {
12459 if (this.state === 'READY') {
12460 this.fillBuffer_();
12461 }
12462
12463 if (this.checkBufferTimeout_) {
12464 window$1.clearTimeout(this.checkBufferTimeout_);
12465 }
12466
12467 this.checkBufferTimeout_ = window$1.setTimeout(this.monitorBufferTick_.bind(this), CHECK_BUFFER_DELAY);
12468 }
12469
12470 /**
12471 * fill the buffer with segements unless the sourceBuffers are
12472 * currently updating
12473 *
12474 * Note: this function should only ever be called by monitorBuffer_
12475 * and never directly
12476 *
12477 * @private
12478 */
12479
12480 }, {
12481 key: 'fillBuffer_',
12482 value: function fillBuffer_() {
12483 if (this.sourceUpdater_.updating()) {
12484 return;
12485 }
12486
12487 if (!this.syncPoint_) {
12488 this.syncPoint_ = this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_());
12489 }
12490
12491 // see if we need to begin loading immediately
12492 var segmentInfo = this.checkBuffer_(this.buffered_(), this.playlist_, this.mediaIndex, this.hasPlayed_(), this.currentTime_(), this.syncPoint_);
12493
12494 if (!segmentInfo) {
12495 return;
12496 }
12497
12498 if (this.isEndOfStream_(segmentInfo.mediaIndex)) {
12499 this.endOfStream();
12500 return;
12501 }
12502
12503 if (segmentInfo.mediaIndex === this.playlist_.segments.length - 1 && this.mediaSource_.readyState === 'ended' && !this.seeking_()) {
12504 return;
12505 }
12506
12507 // We will need to change timestampOffset of the sourceBuffer if:
12508 // - The segment.timeline !== this.currentTimeline
12509 // (we are crossing a discontinuity somehow)
12510 // - The "timestampOffset" for the start of this segment is less than
12511 // the currently set timestampOffset
12512 // Also, clear captions if we are crossing a discontinuity boundary
12513 // Previously, we changed the timestampOffset if the start of this segment
12514 // is less than the currently set timestampOffset but this isn't wanted
12515 // as it can produce bad behavior, especially around long running
12516 // live streams
12517 if (segmentInfo.timeline !== this.currentTimeline_) {
12518 this.syncController_.reset();
12519 segmentInfo.timestampOffset = segmentInfo.startOfSegment;
12520 if (this.captionParser_) {
12521 this.captionParser_.clearAllCaptions();
12522 }
12523 }
12524
12525 this.loadSegment_(segmentInfo);
12526 }
12527
12528 /**
12529 * Determines if this segment loader is at the end of it's stream.
12530 *
12531 * @param {Number} mediaIndex the index of segment we last appended
12532 * @param {Object} [playlist=this.playlist_] a media playlist object
12533 * @returns {Boolean} true if at end of stream, false otherwise.
12534 */
12535
12536 }, {
12537 key: 'isEndOfStream_',
12538 value: function isEndOfStream_(mediaIndex) {
12539 var playlist = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : this.playlist_;
12540
12541 return detectEndOfStream(playlist, this.mediaSource_, mediaIndex) && !this.sourceUpdater_.updating();
12542 }
12543
12544 /**
12545 * Determines what segment request should be made, given current playback
12546 * state.
12547 *
12548 * @param {TimeRanges} buffered - the state of the buffer
12549 * @param {Object} playlist - the playlist object to fetch segments from
12550 * @param {Number} mediaIndex - the previous mediaIndex fetched or null
12551 * @param {Boolean} hasPlayed - a flag indicating whether we have played or not
12552 * @param {Number} currentTime - the playback position in seconds
12553 * @param {Object} syncPoint - a segment info object that describes the
12554 * @returns {Object} a segment request object that describes the segment to load
12555 */
12556
12557 }, {
12558 key: 'checkBuffer_',
12559 value: function checkBuffer_(buffered, playlist, mediaIndex, hasPlayed, currentTime, syncPoint) {
12560 var lastBufferedEnd = 0;
12561 var startOfSegment = void 0;
12562
12563 if (buffered.length) {
12564 lastBufferedEnd = buffered.end(buffered.length - 1);
12565 }
12566
12567 var bufferedTime = Math.max(0, lastBufferedEnd - currentTime);
12568
12569 if (!playlist.segments.length) {
12570 return null;
12571 }
12572
12573 // if there is plenty of content buffered, and the video has
12574 // been played before relax for awhile
12575 if (bufferedTime >= this.goalBufferLength_()) {
12576 return null;
12577 }
12578
12579 // if the video has not yet played once, and we already have
12580 // one segment downloaded do nothing
12581 if (!hasPlayed && bufferedTime >= 1) {
12582 return null;
12583 }
12584
12585 // When the syncPoint is null, there is no way of determining a good
12586 // conservative segment index to fetch from
12587 // The best thing to do here is to get the kind of sync-point data by
12588 // making a request
12589 if (syncPoint === null) {
12590 mediaIndex = this.getSyncSegmentCandidate_(playlist);
12591 return this.generateSegmentInfo_(playlist, mediaIndex, null, true);
12592 }
12593
12594 // Under normal playback conditions fetching is a simple walk forward
12595 if (mediaIndex !== null) {
12596 var segment = playlist.segments[mediaIndex];
12597
12598 startOfSegment = lastBufferedEnd;
12599
12600 return this.generateSegmentInfo_(playlist, mediaIndex + 1, startOfSegment, false);
12601 }
12602
12603 // There is a sync-point but the lack of a mediaIndex indicates that
12604 // we need to make a good conservative guess about which segment to
12605 // fetch
12606 if (this.fetchAtBuffer_) {
12607 // Find the segment containing the end of the buffer
12608 var mediaSourceInfo = Playlist.getMediaInfoForTime(playlist, lastBufferedEnd, syncPoint.segmentIndex, syncPoint.time);
12609
12610 mediaIndex = mediaSourceInfo.mediaIndex;
12611 startOfSegment = mediaSourceInfo.startTime;
12612 } else {
12613 // Find the segment containing currentTime
12614 var _mediaSourceInfo = Playlist.getMediaInfoForTime(playlist, currentTime, syncPoint.segmentIndex, syncPoint.time);
12615
12616 mediaIndex = _mediaSourceInfo.mediaIndex;
12617 startOfSegment = _mediaSourceInfo.startTime;
12618 }
12619
12620 return this.generateSegmentInfo_(playlist, mediaIndex, startOfSegment, false);
12621 }
12622
12623 /**
12624 * The segment loader has no recourse except to fetch a segment in the
12625 * current playlist and use the internal timestamps in that segment to
12626 * generate a syncPoint. This function returns a good candidate index
12627 * for that process.
12628 *
12629 * @param {Object} playlist - the playlist object to look for a
12630 * @returns {Number} An index of a segment from the playlist to load
12631 */
12632
12633 }, {
12634 key: 'getSyncSegmentCandidate_',
12635 value: function getSyncSegmentCandidate_(playlist) {
12636 var _this2 = this;
12637
12638 if (this.currentTimeline_ === -1) {
12639 return 0;
12640 }
12641
12642 var segmentIndexArray = playlist.segments.map(function (s, i) {
12643 return {
12644 timeline: s.timeline,
12645 segmentIndex: i
12646 };
12647 }).filter(function (s) {
12648 return s.timeline === _this2.currentTimeline_;
12649 });
12650
12651 if (segmentIndexArray.length) {
12652 return segmentIndexArray[Math.min(segmentIndexArray.length - 1, 1)].segmentIndex;
12653 }
12654
12655 return Math.max(playlist.segments.length - 1, 0);
12656 }
12657 }, {
12658 key: 'generateSegmentInfo_',
12659 value: function generateSegmentInfo_(playlist, mediaIndex, startOfSegment, isSyncRequest) {
12660 if (mediaIndex < 0 || mediaIndex >= playlist.segments.length) {
12661 return null;
12662 }
12663
12664 var segment = playlist.segments[mediaIndex];
12665
12666 return {
12667 requestId: 'segment-loader-' + Math.random(),
12668 // resolve the segment URL relative to the playlist
12669 uri: segment.resolvedUri,
12670 // the segment's mediaIndex at the time it was requested
12671 mediaIndex: mediaIndex,
12672 // whether or not to update the SegmentLoader's state with this
12673 // segment's mediaIndex
12674 isSyncRequest: isSyncRequest,
12675 startOfSegment: startOfSegment,
12676 // the segment's playlist
12677 playlist: playlist,
12678 // unencrypted bytes of the segment
12679 bytes: null,
12680 // when a key is defined for this segment, the encrypted bytes
12681 encryptedBytes: null,
12682 // The target timestampOffset for this segment when we append it
12683 // to the source buffer
12684 timestampOffset: null,
12685 // The timeline that the segment is in
12686 timeline: segment.timeline,
12687 // The expected duration of the segment in seconds
12688 duration: segment.duration,
12689 // retain the segment in case the playlist updates while doing an async process
12690 segment: segment
12691 };
12692 }
12693
12694 /**
12695 * Determines if the network has enough bandwidth to complete the current segment
12696 * request in a timely manner. If not, the request will be aborted early and bandwidth
12697 * updated to trigger a playlist switch.
12698 *
12699 * @param {Object} stats
12700 * Object containing stats about the request timing and size
12701 * @return {Boolean} True if the request was aborted, false otherwise
12702 * @private
12703 */
12704
12705 }, {
12706 key: 'abortRequestEarly_',
12707 value: function abortRequestEarly_(stats) {
12708 if (this.hls_.tech_.paused() ||
12709 // Don't abort if the current playlist is on the lowestEnabledRendition
12710 // TODO: Replace using timeout with a boolean indicating whether this playlist is
12711 // the lowestEnabledRendition.
12712 !this.xhrOptions_.timeout ||
12713 // Don't abort if we have no bandwidth information to estimate segment sizes
12714 !this.playlist_.attributes.BANDWIDTH) {
12715 return false;
12716 }
12717
12718 // Wait at least 1 second since the first byte of data has been received before
12719 // using the calculated bandwidth from the progress event to allow the bitrate
12720 // to stabilize
12721 if (Date.now() - (stats.firstBytesReceivedAt || Date.now()) < 1000) {
12722 return false;
12723 }
12724
12725 var currentTime = this.currentTime_();
12726 var measuredBandwidth = stats.bandwidth;
12727 var segmentDuration = this.pendingSegment_.duration;
12728
12729 var requestTimeRemaining = Playlist.estimateSegmentRequestTime(segmentDuration, measuredBandwidth, this.playlist_, stats.bytesReceived);
12730
12731 // Subtract 1 from the timeUntilRebuffer so we still consider an early abort
12732 // if we are only left with less than 1 second when the request completes.
12733 // A negative timeUntilRebuffering indicates we are already rebuffering
12734 var timeUntilRebuffer$$1 = timeUntilRebuffer(this.buffered_(), currentTime, this.hls_.tech_.playbackRate()) - 1;
12735
12736 // Only consider aborting early if the estimated time to finish the download
12737 // is larger than the estimated time until the player runs out of forward buffer
12738 if (requestTimeRemaining <= timeUntilRebuffer$$1) {
12739 return false;
12740 }
12741
12742 var switchCandidate = minRebufferMaxBandwidthSelector({
12743 master: this.hls_.playlists.master,
12744 currentTime: currentTime,
12745 bandwidth: measuredBandwidth,
12746 duration: this.duration_(),
12747 segmentDuration: segmentDuration,
12748 timeUntilRebuffer: timeUntilRebuffer$$1,
12749 currentTimeline: this.currentTimeline_,
12750 syncController: this.syncController_
12751 });
12752
12753 if (!switchCandidate) {
12754 return;
12755 }
12756
12757 var rebufferingImpact = requestTimeRemaining - timeUntilRebuffer$$1;
12758
12759 var timeSavedBySwitching = rebufferingImpact - switchCandidate.rebufferingImpact;
12760
12761 var minimumTimeSaving = 0.5;
12762
12763 // If we are already rebuffering, increase the amount of variance we add to the
12764 // potential round trip time of the new request so that we are not too aggressive
12765 // with switching to a playlist that might save us a fraction of a second.
12766 if (timeUntilRebuffer$$1 <= TIME_FUDGE_FACTOR) {
12767 minimumTimeSaving = 1;
12768 }
12769
12770 if (!switchCandidate.playlist || switchCandidate.playlist.uri === this.playlist_.uri || timeSavedBySwitching < minimumTimeSaving) {
12771 return false;
12772 }
12773
12774 // set the bandwidth to that of the desired playlist being sure to scale by
12775 // BANDWIDTH_VARIANCE and add one so the playlist selector does not exclude it
12776 // don't trigger a bandwidthupdate as the bandwidth is artifial
12777 this.bandwidth = switchCandidate.playlist.attributes.BANDWIDTH * Config.BANDWIDTH_VARIANCE + 1;
12778 this.abort();
12779 this.trigger('earlyabort');
12780 return true;
12781 }
12782
12783 /**
12784 * XHR `progress` event handler
12785 *
12786 * @param {Event}
12787 * The XHR `progress` event
12788 * @param {Object} simpleSegment
12789 * A simplified segment object copy
12790 * @private
12791 */
12792
12793 }, {
12794 key: 'handleProgress_',
12795 value: function handleProgress_(event, simpleSegment) {
12796 if (!this.pendingSegment_ || simpleSegment.requestId !== this.pendingSegment_.requestId || this.abortRequestEarly_(simpleSegment.stats)) {
12797 return;
12798 }
12799
12800 this.trigger('progress');
12801 }
12802
12803 /**
12804 * load a specific segment from a request into the buffer
12805 *
12806 * @private
12807 */
12808
12809 }, {
12810 key: 'loadSegment_',
12811 value: function loadSegment_(segmentInfo) {
12812 this.state = 'WAITING';
12813 this.pendingSegment_ = segmentInfo;
12814 this.trimBackBuffer_(segmentInfo);
12815
12816 segmentInfo.abortRequests = mediaSegmentRequest(this.hls_.xhr, this.xhrOptions_, this.decrypter_, this.captionParser_, this.createSimplifiedSegmentObj_(segmentInfo),
12817 // progress callback
12818 this.handleProgress_.bind(this), this.segmentRequestFinished_.bind(this));
12819 }
12820
12821 /**
12822 * trim the back buffer so that we don't have too much data
12823 * in the source buffer
12824 *
12825 * @private
12826 *
12827 * @param {Object} segmentInfo - the current segment
12828 */
12829
12830 }, {
12831 key: 'trimBackBuffer_',
12832 value: function trimBackBuffer_(segmentInfo) {
12833 var removeToTime = safeBackBufferTrimTime(this.seekable_(), this.currentTime_(), this.playlist_.targetDuration || 10);
12834
12835 // Chrome has a hard limit of 150MB of
12836 // buffer and a very conservative "garbage collector"
12837 // We manually clear out the old buffer to ensure
12838 // we don't trigger the QuotaExceeded error
12839 // on the source buffer during subsequent appends
12840
12841 if (removeToTime > 0) {
12842 this.remove(0, removeToTime);
12843 }
12844 }
12845
12846 /**
12847 * created a simplified copy of the segment object with just the
12848 * information necessary to perform the XHR and decryption
12849 *
12850 * @private
12851 *
12852 * @param {Object} segmentInfo - the current segment
12853 * @returns {Object} a simplified segment object copy
12854 */
12855
12856 }, {
12857 key: 'createSimplifiedSegmentObj_',
12858 value: function createSimplifiedSegmentObj_(segmentInfo) {
12859 var segment = segmentInfo.segment;
12860 var simpleSegment = {
12861 resolvedUri: segment.resolvedUri,
12862 byterange: segment.byterange,
12863 requestId: segmentInfo.requestId
12864 };
12865
12866 if (segment.key) {
12867 // if the media sequence is greater than 2^32, the IV will be incorrect
12868 // assuming 10s segments, that would be about 1300 years
12869 var iv = segment.key.iv || new Uint32Array([0, 0, 0, segmentInfo.mediaIndex + segmentInfo.playlist.mediaSequence]);
12870
12871 simpleSegment.key = this.segmentKey(segment.key);
12872 simpleSegment.key.iv = iv;
12873 }
12874
12875 if (segment.map) {
12876 simpleSegment.map = this.initSegment(segment.map);
12877 }
12878
12879 return simpleSegment;
12880 }
12881
12882 /**
12883 * Handle the callback from the segmentRequest function and set the
12884 * associated SegmentLoader state and errors if necessary
12885 *
12886 * @private
12887 */
12888
12889 }, {
12890 key: 'segmentRequestFinished_',
value: function segmentRequestFinished_(error, simpleSegment) {
  // every request counts as a media request even if it has been aborted
  // or canceled due to a timeout
  this.mediaRequests += 1;

  // accumulate transfer stats used for bandwidth estimation
  if (simpleSegment.stats) {
    this.mediaBytesTransferred += simpleSegment.stats.bytesReceived;
    this.mediaTransferDuration += simpleSegment.stats.roundTripTime;
  }

  // The request was aborted and the SegmentLoader has already been reset
  if (!this.pendingSegment_) {
    this.mediaRequestsAborted += 1;
    return;
  }

  // the request was aborted and the SegmentLoader has already started
  // another request. this can happen when the timeout for an aborted
  // request triggers due to a limitation in the XHR library
  // do not count this as any sort of request or we risk double-counting
  if (simpleSegment.requestId !== this.pendingSegment_.requestId) {
    return;
  }

  // an error occurred from the active pendingSegment_ so reset everything
  if (error) {
    this.pendingSegment_ = null;
    this.state = 'READY';

    // the requests were aborted just record the aborted stat and exit
    // this is not a true error condition and nothing corrective needs
    // to be done
    if (error.code === REQUEST_ERRORS.ABORTED) {
      this.mediaRequestsAborted += 1;
      return;
    }

    // stop making further requests until a corrective action is taken
    this.pause();

    // the error is really just that at least one of the requests timed-out
    // set the bandwidth to a very low value and trigger an ABR switch to
    // take emergency action
    if (error.code === REQUEST_ERRORS.TIMEOUT) {
      this.mediaRequestsTimedout += 1;
      this.bandwidth = 1;
      this.roundTrip = NaN;
      this.trigger('bandwidthupdate');
      return;
    }

    // if control-flow has arrived here, then the error is real
    // emit an error event to blacklist the current playlist
    this.mediaRequestsErrored += 1;
    this.error(error);
    this.trigger('error');
    return;
  }

  // the response was a success so set any bandwidth stats the request
  // generated for ABR purposes
  this.bandwidth = simpleSegment.stats.bandwidth;
  this.roundTrip = simpleSegment.stats.roundTripTime;

  // if this request included an initialization segment, save that data
  // to the initSegment cache
  if (simpleSegment.map) {
    simpleSegment.map = this.initSegment(simpleSegment.map, true);
  }

  // if this request included a segment key, save that data in the cache
  if (simpleSegment.key) {
    this.segmentKey(simpleSegment.key, true);
  }

  this.processSegmentResponse_(simpleSegment);
}
12967
12968 /**
12969 * Move any important data from the simplified segment object
12970 * back to the real segment object for future phases
12971 *
12972 * @private
12973 */
12974
12975 }, {
12976 key: 'processSegmentResponse_',
12977 value: function processSegmentResponse_(simpleSegment) {
12978 var segmentInfo = this.pendingSegment_;
12979
12980 segmentInfo.bytes = simpleSegment.bytes;
12981 if (simpleSegment.map) {
12982 segmentInfo.segment.map.bytes = simpleSegment.map.bytes;
12983 }
12984
12985 segmentInfo.endOfAllRequests = simpleSegment.endOfAllRequests;
12986
12987 // This has fmp4 captions, add them to text tracks
12988 if (simpleSegment.fmp4Captions) {
12989 createCaptionsTrackIfNotExists(this.inbandTextTracks_, this.hls_.tech_, simpleSegment.captionStreams);
12990 addCaptionData({
12991 inbandTextTracks: this.inbandTextTracks_,
12992 captionArray: simpleSegment.fmp4Captions,
12993 // fmp4s will not have a timestamp offset
12994 timestampOffset: 0
12995 });
12996 // Reset stored captions since we added parsed
12997 // captions to a text track at this point
12998 if (this.captionParser_) {
12999 this.captionParser_.clearParsedCaptions();
13000 }
13001 }
13002
13003 this.handleSegment_();
13004 }
13005
13006 /**
13007 * append a decrypted segement to the SourceBuffer through a SourceUpdater
13008 *
13009 * @private
13010 */
13011
13012 }, {
13013 key: 'handleSegment_',
value: function handleSegment_() {
  var _this3 = this;

  // the loader was reset (e.g. aborted) while this segment was in flight
  if (!this.pendingSegment_) {
    this.state = 'READY';
    return;
  }

  var segmentInfo = this.pendingSegment_;
  var segment = segmentInfo.segment;
  // probe the raw segment bytes for timing and track information
  var timingInfo = this.syncController_.probeSegmentInfo(segmentInfo);

  // When we have our first timing info, determine what media types this loader is
  // dealing with. Although we're maintaining extra state, it helps to preserve the
  // separation of segment loader from the actual source buffers.
  if (typeof this.startingMedia_ === 'undefined' && timingInfo && (
  // Guard against cases where we're not getting timing info at all until we are
  // certain that all streams will provide it.
  timingInfo.containsAudio || timingInfo.containsVideo)) {
    this.startingMedia_ = {
      containsAudio: timingInfo.containsAudio,
      containsVideo: timingInfo.containsVideo
    };
  }

  // a media-type change mid-stream is treated as fatal: the playlist is
  // blacklisted forever and an error is surfaced
  var illegalMediaSwitchError = illegalMediaSwitch(this.loaderType_, this.startingMedia_, timingInfo);

  if (illegalMediaSwitchError) {
    this.error({
      message: illegalMediaSwitchError,
      blacklistDuration: Infinity
    });
    this.trigger('error');
    return;
  }

  // sync requests exist only to gather timing info; their bytes are not appended
  if (segmentInfo.isSyncRequest) {
    this.trigger('syncinfoupdate');
    this.pendingSegment_ = null;
    this.state = 'READY';
    return;
  }

  if (segmentInfo.timestampOffset !== null && segmentInfo.timestampOffset !== this.sourceUpdater_.timestampOffset()) {

    // Subtract any difference between the PTS and DTS times of the first frame
    // from the timeStampOffset (which currently equals the buffered.end) to prevent
    // creating any gaps in the buffer
    if (timingInfo && timingInfo.segmentTimestampInfo) {
      var ptsStartTime = timingInfo.segmentTimestampInfo[0].ptsTime;
      var dtsStartTime = timingInfo.segmentTimestampInfo[0].dtsTime;

      segmentInfo.timestampOffset -= ptsStartTime - dtsStartTime;
    }

    this.sourceUpdater_.timestampOffset(segmentInfo.timestampOffset);
    // fired when a timestamp offset is set in HLS (can also identify discontinuities)
    this.trigger('timestampoffset');
  }

  var timelineMapping = this.syncController_.mappingForTimeline(segmentInfo.timeline);

  if (timelineMapping !== null) {
    this.trigger({
      type: 'segmenttimemapping',
      mapping: timelineMapping
    });
  }

  this.state = 'APPENDING';

  // if the media initialization segment is changing, append it
  // before the content segment
  if (segment.map) {
    var initId = initSegmentId(segment.map);

    if (!this.activeInitSegmentId_ || this.activeInitSegmentId_ !== initId) {
      var initSegment = this.initSegment(segment.map);

      this.sourceUpdater_.appendBuffer({
        bytes: initSegment.bytes
      }, function () {
        // only record the init segment as active once its append completes
        _this3.activeInitSegmentId_ = initId;
      });
    }
  }

  segmentInfo.byteLength = segmentInfo.bytes.byteLength;
  // prefer probed start/end times over the playlist's declared duration
  // when accumulating loaded-seconds stats
  if (typeof segment.start === 'number' && typeof segment.end === 'number') {
    this.mediaSecondsLoaded += segment.end - segment.start;
  } else {
    this.mediaSecondsLoaded += segment.duration;
  }

  this.logger_(segmentInfoString(segmentInfo));

  this.sourceUpdater_.appendBuffer({
    bytes: segmentInfo.bytes,
    videoSegmentTimingInfoCallback: this.handleVideoSegmentTimingInfo_.bind(this, segmentInfo.requestId)
  }, this.handleUpdateEnd_.bind(this));
}
13115 }, {
13116 key: 'handleVideoSegmentTimingInfo_',
13117 value: function handleVideoSegmentTimingInfo_(requestId, event) {
13118 if (!this.pendingSegment_ || requestId !== this.pendingSegment_.requestId) {
13119 return;
13120 }
13121
13122 var segment = this.pendingSegment_.segment;
13123
13124 if (!segment.videoTimingInfo) {
13125 segment.videoTimingInfo = {};
13126 }
13127
13128 segment.videoTimingInfo.transmuxerPrependedSeconds = event.videoSegmentTimingInfo.prependedContentDuration || 0;
13129 segment.videoTimingInfo.transmuxedPresentationStart = event.videoSegmentTimingInfo.start.presentation;
13130 segment.videoTimingInfo.transmuxedPresentationEnd = event.videoSegmentTimingInfo.end.presentation;
13131 // mainly used as a reference for debugging
13132 segment.videoTimingInfo.baseMediaDecodeTime = event.videoSegmentTimingInfo.baseMediaDecodeTime;
13133 }
13134
13135 /**
13136 * callback to run when appendBuffer is finished. detects if we are
13137 * in a good state to do things with the data we got, or if we need
13138 * to wait for more
13139 *
13140 * @private
13141 */
13142
13143 }, {
13144 key: 'handleUpdateEnd_',
value: function handleUpdateEnd_() {
  // the loader was reset while the append was in progress; go back to
  // READY and resume monitoring if not paused
  if (!this.pendingSegment_) {
    this.state = 'READY';
    if (!this.paused()) {
      this.monitorBuffer_();
    }
    return;
  }

  var segmentInfo = this.pendingSegment_;
  var segment = segmentInfo.segment;
  // a non-null mediaIndex means this append continued a simple walk forward
  var isWalkingForward = this.mediaIndex !== null;

  this.pendingSegment_ = null;
  this.recordThroughput_(segmentInfo);
  this.addSegmentMetadataCue_(segmentInfo);

  this.state = 'READY';

  // advance loader position to the segment just appended
  this.mediaIndex = segmentInfo.mediaIndex;
  this.fetchAtBuffer_ = true;
  this.currentTimeline_ = segmentInfo.timeline;

  // We must update the syncinfo to recalculate the seekable range before
  // the following conditional otherwise it may consider this a bad "guess"
  // and attempt to resync when the post-update seekable window and live
  // point would mean that this was the perfect segment to fetch
  this.trigger('syncinfoupdate');

  // If we previously appended a segment that ends more than 3 targetDurations before
  // the currentTime_ that means that our conservative guess was too conservative.
  // In that case, reset the loader state so that we try to use any information gained
  // from the previous request to create a new, more accurate, sync-point.
  if (segment.end && this.currentTime_() - segment.end > segmentInfo.playlist.targetDuration * 3) {
    this.resetEverything();
    return;
  }

  // Don't do a rendition switch unless we have enough time to get a sync segment
  // and conservatively guess
  if (isWalkingForward) {
    this.trigger('bandwidthupdate');
  }
  this.trigger('progress');

  // any time an update finishes and the last segment is in the
  // buffer, end the stream. this ensures the "ended" event will
  // fire if playback reaches that point.
  if (this.isEndOfStream_(segmentInfo.mediaIndex + 1, segmentInfo.playlist)) {
    this.endOfStream();
  }

  if (!this.paused()) {
    this.monitorBuffer_();
  }
}
13201
13202 /**
13203 * Records the current throughput of the decrypt, transmux, and append
13204 * portion of the semgment pipeline. `throughput.rate` is a the cumulative
13205 * moving average of the throughput. `throughput.count` is the number of
13206 * data points in the average.
13207 *
13208 * @private
13209 * @param {Object} segmentInfo the object returned by loadSegment
13210 */
13211
13212 }, {
13213 key: 'recordThroughput_',
13214 value: function recordThroughput_(segmentInfo) {
13215 var rate = this.throughput.rate;
13216 // Add one to the time to ensure that we don't accidentally attempt to divide
13217 // by zero in the case where the throughput is ridiculously high
13218 var segmentProcessingTime = Date.now() - segmentInfo.endOfAllRequests + 1;
13219 // Multiply by 8000 to convert from bytes/millisecond to bits/second
13220 var segmentProcessingThroughput = Math.floor(segmentInfo.byteLength / segmentProcessingTime * 8 * 1000);
13221
13222 // This is just a cumulative moving average calculation:
13223 // newAvg = oldAvg + (sample - oldAvg) / (sampleCount + 1)
13224 this.throughput.rate += (segmentProcessingThroughput - rate) / ++this.throughput.count;
13225 }
13226
13227 /**
13228 * Adds a cue to the segment-metadata track with some metadata information about the
13229 * segment
13230 *
13231 * @private
13232 * @param {Object} segmentInfo
13233 * the object returned by loadSegment
13234 * @method addSegmentMetadataCue_
13235 */
13236
13237 }, {
13238 key: 'addSegmentMetadataCue_',
13239 value: function addSegmentMetadataCue_(segmentInfo) {
13240 if (!this.segmentMetadataTrack_) {
13241 return;
13242 }
13243
13244 var segment = segmentInfo.segment;
13245 var start = segment.start;
13246 var end = segment.end;
13247
13248 // Do not try adding the cue if the start and end times are invalid.
13249 if (!finite(start) || !finite(end)) {
13250 return;
13251 }
13252
13253 removeCuesFromTrack(start, end, this.segmentMetadataTrack_);
13254
13255 var Cue = window$1.WebKitDataCue || window$1.VTTCue;
13256 var value = {
13257 custom: segment.custom,
13258 dateTimeObject: segment.dateTimeObject,
13259 dateTimeString: segment.dateTimeString,
13260 bandwidth: segmentInfo.playlist.attributes.BANDWIDTH,
13261 resolution: segmentInfo.playlist.attributes.RESOLUTION,
13262 codecs: segmentInfo.playlist.attributes.CODECS,
13263 byteLength: segmentInfo.byteLength,
13264 uri: segmentInfo.uri,
13265 timeline: segmentInfo.timeline,
13266 playlist: segmentInfo.playlist.id,
13267 start: start,
13268 end: end
13269 };
13270 var data = JSON.stringify(value);
13271 var cue = new Cue(start, end, data);
13272
13273 // Attach the metadata to the value property of the cue to keep consistency between
13274 // the differences of WebKitDataCue in safari and VTTCue in other browsers
13275 cue.value = value;
13276
13277 this.segmentMetadataTrack_.addCue(cue);
13278 }
13279 }]);
13280 return SegmentLoader;
13281}(videojs.EventTarget);
13282
/**
 * Decode a byte array of UTF-8 encoded text into a string.
 *
 * The conversion to a binary string is chunked because
 * `Function.prototype.apply` spreads the array into call arguments and
 * engines cap the maximum argument count, which would throw on large
 * segments if the whole array were passed at once.
 *
 * @param {Uint8Array} uintArray bytes of UTF-8 encoded text
 * @return {String} the decoded string
 */
var uint8ToUtf8 = function uint8ToUtf8(uintArray) {
  var CHUNK_SIZE = 8192;
  var binary = '';

  for (var i = 0; i < uintArray.length; i += CHUNK_SIZE) {
    var chunk = uintArray.subarray ? uintArray.subarray(i, i + CHUNK_SIZE) : uintArray.slice(i, i + CHUNK_SIZE);

    binary += String.fromCharCode.apply(null, chunk);
  }

  // escape() percent-encodes each byte of the binary string;
  // decodeURIComponent() then interprets those bytes as UTF-8
  return decodeURIComponent(escape(binary));
};
13286
13287/**
13288 * @file vtt-segment-loader.js
13289 */
13290
// Two WebVTT line terminators (LF LF); the WebVTT spec requires two or more
// line terminators between the WebVTT header and the rest of the file.
var VTT_LINE_TERMINATORS = new Uint8Array([0x0A, 0x0A]);
13294
13295/**
13296 * An object that manages segment loading and appending.
13297 *
13298 * @class VTTSegmentLoader
13299 * @param {Object} options required and optional options
13300 * @extends videojs.EventTarget
13301 */
13302
13303var VTTSegmentLoader = function (_SegmentLoader) {
13304 inherits(VTTSegmentLoader, _SegmentLoader);
13305
function VTTSegmentLoader(settings) {
  // optional `options` argument (transpiled default parameter)
  var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
  classCallCheck(this, VTTSegmentLoader);

  // SegmentLoader requires a MediaSource be specified or it will throw an error;
  // however, VTTSegmentLoader has no need of a media source, so delete the reference
  var _this = possibleConstructorReturn(this, (VTTSegmentLoader.__proto__ || Object.getPrototypeOf(VTTSegmentLoader)).call(this, settings, options));

  // VTT cues are appended to a text track rather than a SourceBuffer
  _this.mediaSource_ = null;

  // the TextTrack that parsed cues will be added to; assigned via track()
  _this.subtitlesTrack_ = null;

  // whether the tech renders text tracks natively
  _this.featuresNativeTextTracks_ = settings.featuresNativeTextTracks;
  return _this;
}
13321
13322 /**
13323 * Indicates which time ranges are buffered
13324 *
13325 * @return {TimeRange}
13326 * TimeRange object representing the current buffered ranges
13327 */
13328
13329
13330 createClass(VTTSegmentLoader, [{
13331 key: 'buffered_',
13332 value: function buffered_() {
13333 if (!this.subtitlesTrack_ || !this.subtitlesTrack_.cues.length) {
13334 return videojs.createTimeRanges();
13335 }
13336
13337 var cues = this.subtitlesTrack_.cues;
13338 var start = cues[0].startTime;
13339 var end = cues[cues.length - 1].startTime;
13340
13341 return videojs.createTimeRanges([[start, end]]);
13342 }
13343
13344 /**
13345 * Gets and sets init segment for the provided map
13346 *
13347 * @param {Object} map
13348 * The map object representing the init segment to get or set
13349 * @param {Boolean=} set
13350 * If true, the init segment for the provided map should be saved
13351 * @return {Object}
13352 * map object for desired init segment
13353 */
13354
13355 }, {
13356 key: 'initSegment',
value: function initSegment(map) {
  // optional `set` flag (transpiled default parameter); when true and the
  // map carries bytes, cache an augmented copy of the init segment
  var set$$1 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false;

  if (!map) {
    return null;
  }

  var id = initSegmentId(map);
  var storedMap = this.initSegments_[id];

  if (set$$1 && !storedMap && map.bytes) {
    // append WebVTT line terminators to the media initialization segment if it exists
    // to follow the WebVTT spec (https://w3c.github.io/webvtt/#file-structure) that
    // requires two or more WebVTT line terminators between the WebVTT header and the
    // rest of the file
    var combinedByteLength = VTT_LINE_TERMINATORS.byteLength + map.bytes.byteLength;
    var combinedSegment = new Uint8Array(combinedByteLength);

    // header bytes first, then the two line terminators
    combinedSegment.set(map.bytes);
    combinedSegment.set(VTT_LINE_TERMINATORS, map.bytes.byteLength);

    this.initSegments_[id] = storedMap = {
      resolvedUri: map.resolvedUri,
      byterange: map.byterange,
      bytes: combinedSegment
    };
  }

  // fall back to the uncached map when nothing has been stored
  return storedMap || map;
}
13387
13388 /**
13389 * Returns true if all configuration required for loading is present, otherwise false.
13390 *
13391 * @return {Boolean} True if the all configuration is ready for loading
13392 * @private
13393 */
13394
13395 }, {
13396 key: 'couldBeginLoading_',
value: function couldBeginLoading_() {
  // loading requires a playlist and a destination text track, and the
  // loader must not be paused (truthy-chain result is used as a boolean)
  return this.playlist_ && this.subtitlesTrack_ && !this.paused();
}
13400
13401 /**
13402 * Once all the starting parameters have been specified, begin
13403 * operation. This method should only be invoked from the INIT
13404 * state.
13405 *
13406 * @private
13407 */
13408
13409 }, {
13410 key: 'init_',
value: function init_() {
  this.state = 'READY';
  // discard any state left over from a previous playlist/track
  this.resetEverything();
  // begin the buffer-monitoring loop
  return this.monitorBuffer_();
}
13416
13417 /**
13418 * Set a subtitle track on the segment loader to add subtitles to
13419 *
13420 * @param {TextTrack=} track
13421 * The text track to add loaded subtitles to
13422 * @return {TextTrack}
13423 * Returns the subtitles track
13424 */
13425
13426 }, {
13427 key: 'track',
13428 value: function track(_track) {
13429 if (typeof _track === 'undefined') {
13430 return this.subtitlesTrack_;
13431 }
13432
13433 this.subtitlesTrack_ = _track;
13434
13435 // if we were unpaused but waiting for a sourceUpdater, start
13436 // buffering now
13437 if (this.state === 'INIT' && this.couldBeginLoading_()) {
13438 this.init_();
13439 }
13440
13441 return this.subtitlesTrack_;
13442 }
13443
  /**
   * Remove any data in the source buffer between start and end times
   * @param {Number} start - the start time of the region to remove from the buffer
   * @param {Number} end - the end time of the region to remove from the buffer
   */

  }, {
    key: 'remove',
    value: function remove(start, end) {
      // Subtitles live as cues on the text track rather than in a
      // SourceBuffer, so "removing buffered data" means removing cues.
      removeCuesFromTrack(start, end, this.subtitlesTrack_);
    }
13455
  /**
   * fill the buffer with segments unless the sourceBuffers are
   * currently updating
   *
   * Note: this function should only ever be called by monitorBuffer_
   * and never directly
   *
   * @private
   */

  }, {
    key: 'fillBuffer_',
    value: function fillBuffer_() {
      var _this2 = this;

      // A sync-point maps a display time to a segment index; we need one
      // before we can decide which segment to request next.
      if (!this.syncPoint_) {
        this.syncPoint_ = this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_());
      }

      // see if we need to begin loading immediately
      var segmentInfo = this.checkBuffer_(this.buffered_(), this.playlist_, this.mediaIndex, this.hasPlayed_(), this.currentTime_(), this.syncPoint_);

      // Skip past segments already known to contain no cues.
      segmentInfo = this.skipEmptySegments_(segmentInfo);

      if (!segmentInfo) {
        return;
      }

      if (this.syncController_.timestampOffsetForTimeline(segmentInfo.timeline) === null) {
        // We don't have the timestamp offset that we need to sync subtitles.
        // Rerun on a timestamp offset or user interaction.
        var checkTimestampOffset = function checkTimestampOffset() {
          _this2.state = 'READY';
          if (!_this2.paused()) {
            // if not paused, queue a buffer check as soon as possible
            _this2.monitorBuffer_();
          }
        };

        this.syncController_.one('timestampoffset', checkTimestampOffset);
        this.state = 'WAITING_ON_TIMELINE';
        return;
      }

      this.loadSegment_(segmentInfo);
    }
13502
13503 /**
13504 * Prevents the segment loader from requesting segments we know contain no subtitles
13505 * by walking forward until we find the next segment that we don't know whether it is
13506 * empty or not.
13507 *
13508 * @param {Object} segmentInfo
13509 * a segment info object that describes the current segment
13510 * @return {Object}
13511 * a segment info object that describes the current segment
13512 */
13513
13514 }, {
13515 key: 'skipEmptySegments_',
13516 value: function skipEmptySegments_(segmentInfo) {
13517 while (segmentInfo && segmentInfo.segment.empty) {
13518 segmentInfo = this.generateSegmentInfo_(segmentInfo.playlist, segmentInfo.mediaIndex + 1, segmentInfo.startOfSegment + segmentInfo.duration, segmentInfo.isSyncRequest);
13519 }
13520 return segmentInfo;
13521 }
13522
  /**
   * append a decrypted segment to the SourceBuffer through a SourceUpdater
   *
   * @private
   */

  }, {
    key: 'handleSegment_',
    value: function handleSegment_() {
      var _this3 = this;

      // The request may have been aborted or the track removed while the
      // segment was in flight.
      if (!this.pendingSegment_ || !this.subtitlesTrack_) {
        this.state = 'READY';
        return;
      }

      this.state = 'APPENDING';

      var segmentInfo = this.pendingSegment_;
      var segment = segmentInfo.segment;

      // Make sure that vttjs has loaded, otherwise, wait till it finished loading
      if (typeof window$1.WebVTT !== 'function' && this.subtitlesTrack_ && this.subtitlesTrack_.tech_) {

        var loadHandler = void 0;
        var errorHandler = function errorHandler() {
          _this3.subtitlesTrack_.tech_.off('vttjsloaded', loadHandler);
          _this3.error({
            message: 'Error loading vtt.js'
          });
          _this3.state = 'READY';
          _this3.pause();
          _this3.trigger('error');
        };

        loadHandler = function loadHandler() {
          _this3.subtitlesTrack_.tech_.off('vttjserror', errorHandler);
          // Re-enter this method now that the WebVTT parser is available.
          _this3.handleSegment_();
        };

        this.state = 'WAITING_ON_VTTJS';
        // Each handler detaches the other, so exactly one of the pair runs.
        this.subtitlesTrack_.tech_.one('vttjsloaded', loadHandler);
        this.subtitlesTrack_.tech_.one('vttjserror', errorHandler);

        return;
      }

      segment.requested = true;

      try {
        this.parseVTTCues_(segmentInfo);
      } catch (e) {
        // Parse failures are surfaced as loader errors and pause loading.
        this.error({
          message: e.message
        });
        this.state = 'READY';
        this.pause();
        return this.trigger('error');
      }

      this.updateTimeMapping_(segmentInfo, this.syncController_.timelines[segmentInfo.timeline], this.playlist_);

      if (segmentInfo.isSyncRequest) {
        // Sync requests only gather timing information; their cues are not
        // appended to the track.
        this.trigger('syncinfoupdate');
        this.pendingSegment_ = null;
        this.state = 'READY';
        return;
      }

      segmentInfo.byteLength = segmentInfo.bytes.byteLength;

      this.mediaSecondsLoaded += segment.duration;

      if (segmentInfo.cues.length) {
        // remove any overlapping cues to prevent doubling
        this.remove(segmentInfo.cues[0].endTime, segmentInfo.cues[segmentInfo.cues.length - 1].endTime);
      }

      segmentInfo.cues.forEach(function (cue) {
        _this3.subtitlesTrack_.addCue(_this3.featuresNativeTextTracks_ ? new window$1.VTTCue(cue.startTime, cue.endTime, cue.text) : cue);
      });

      this.handleUpdateEnd_();
    }
13607
13608 /**
13609 * Uses the WebVTT parser to parse the segment response
13610 *
13611 * @param {Object} segmentInfo
13612 * a segment info object that describes the current segment
13613 * @private
13614 */
13615
13616 }, {
13617 key: 'parseVTTCues_',
13618 value: function parseVTTCues_(segmentInfo) {
13619 var decoder = void 0;
13620 var decodeBytesToString = false;
13621
13622 if (typeof window$1.TextDecoder === 'function') {
13623 decoder = new window$1.TextDecoder('utf8');
13624 } else {
13625 decoder = window$1.WebVTT.StringDecoder();
13626 decodeBytesToString = true;
13627 }
13628
13629 var parser = new window$1.WebVTT.Parser(window$1, window$1.vttjs, decoder);
13630
13631 segmentInfo.cues = [];
13632 segmentInfo.timestampmap = { MPEGTS: 0, LOCAL: 0 };
13633
13634 parser.oncue = segmentInfo.cues.push.bind(segmentInfo.cues);
13635 parser.ontimestampmap = function (map) {
13636 return segmentInfo.timestampmap = map;
13637 };
13638 parser.onparsingerror = function (error) {
13639 videojs.log.warn('Error encountered when parsing cues: ' + error.message);
13640 };
13641
13642 if (segmentInfo.segment.map) {
13643 var mapData = segmentInfo.segment.map.bytes;
13644
13645 if (decodeBytesToString) {
13646 mapData = uint8ToUtf8(mapData);
13647 }
13648
13649 parser.parse(mapData);
13650 }
13651
13652 var segmentData = segmentInfo.bytes;
13653
13654 if (decodeBytesToString) {
13655 segmentData = uint8ToUtf8(segmentData);
13656 }
13657
13658 parser.parse(segmentData);
13659 parser.flush();
13660 }
13661
13662 /**
13663 * Updates the start and end times of any cues parsed by the WebVTT parser using
13664 * the information parsed from the X-TIMESTAMP-MAP header and a TS to media time mapping
13665 * from the SyncController
13666 *
13667 * @param {Object} segmentInfo
13668 * a segment info object that describes the current segment
13669 * @param {Object} mappingObj
13670 * object containing a mapping from TS to media time
13671 * @param {Object} playlist
13672 * the playlist object containing the segment
13673 * @private
13674 */
13675
13676 }, {
13677 key: 'updateTimeMapping_',
13678 value: function updateTimeMapping_(segmentInfo, mappingObj, playlist) {
13679 var segment = segmentInfo.segment;
13680
13681 if (!mappingObj) {
13682 // If the sync controller does not have a mapping of TS to Media Time for the
13683 // timeline, then we don't have enough information to update the cue
13684 // start/end times
13685 return;
13686 }
13687
13688 if (!segmentInfo.cues.length) {
13689 // If there are no cues, we also do not have enough information to figure out
13690 // segment timing. Mark that the segment contains no cues so we don't re-request
13691 // an empty segment.
13692 segment.empty = true;
13693 return;
13694 }
13695
13696 var timestampmap = segmentInfo.timestampmap;
13697 var diff = timestampmap.MPEGTS / 90000 - timestampmap.LOCAL + mappingObj.mapping;
13698
13699 segmentInfo.cues.forEach(function (cue) {
13700 // First convert cue time to TS time using the timestamp-map provided within the vtt
13701 cue.startTime += diff;
13702 cue.endTime += diff;
13703 });
13704
13705 if (!playlist.syncInfo) {
13706 var firstStart = segmentInfo.cues[0].startTime;
13707 var lastStart = segmentInfo.cues[segmentInfo.cues.length - 1].startTime;
13708
13709 playlist.syncInfo = {
13710 mediaSequence: playlist.mediaSequence + segmentInfo.mediaIndex,
13711 time: Math.min(firstStart, lastStart - segment.duration)
13712 };
13713 }
13714 }
13715 }]);
13716 return VTTSegmentLoader;
13717}(SegmentLoader);
13718
13719/**
13720 * @file ad-cue-tags.js
13721 */
13722
/**
 * Searches for an ad cue that overlaps with the given mediaTime
 */
var findAdCue = function findAdCue(track, mediaTime) {
  var cues = track.cues;

  for (var idx = 0; idx < cues.length; idx++) {
    var candidate = cues[idx];
    var overlaps = mediaTime >= candidate.adStartTime && mediaTime <= candidate.adEndTime;

    if (overlaps) {
      return candidate;
    }
  }

  return null;
};
13738
/**
 * Creates or extends ad cues on the given track from the CUE-OUT /
 * CUE-OUT-CONT / CUE-IN markers found on the playlist's segments.
 *
 * @param {Object} media - the playlist whose segments are scanned
 * @param {TextTrack} track - the track ad cues are added to
 * @param {Number} [offset=0] - display time at which the first segment starts
 */
var updateAdCues = function updateAdCues(media, track) {
  var offset = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;

  if (!media.segments) {
    return;
  }

  // Running display time of the current segment's start.
  var mediaTime = offset;
  var cue = void 0;

  for (var i = 0; i < media.segments.length; i++) {
    var segment = media.segments[i];

    if (!cue) {
      // Since the cues will span for at least the segment duration, adding a fudge
      // factor of half segment duration will prevent duplicate cues from being
      // created when timing info is not exact (e.g. cue start time initialized
      // at 10.006677, but next call mediaTime is 10.003332 )
      cue = findAdCue(track, mediaTime + segment.duration / 2);
    }

    if (cue) {
      if ('cueIn' in segment) {
        // Found a CUE-IN so end the cue
        cue.endTime = mediaTime;
        cue.adEndTime = mediaTime;
        mediaTime += segment.duration;
        cue = null;
        continue;
      }

      if (mediaTime < cue.endTime) {
        // Already processed this mediaTime for this cue
        mediaTime += segment.duration;
        continue;
      }

      // otherwise extend cue until a CUE-IN is found
      cue.endTime += segment.duration;
    } else {
      if ('cueOut' in segment) {
        cue = new window$1.VTTCue(mediaTime, mediaTime + segment.duration, segment.cueOut);
        cue.adStartTime = mediaTime;
        // Assumes tag format to be
        // #EXT-X-CUE-OUT:30
        cue.adEndTime = mediaTime + parseFloat(segment.cueOut);
        track.addCue(cue);
      }

      if ('cueOutCont' in segment) {
        // Entered into the middle of an ad cue
        var adOffset = void 0;
        var adTotal = void 0;

        // Assumes tag format to be
        // #EXT-X-CUE-OUT-CONT:10/30

        var _segment$cueOutCont$s = segment.cueOutCont.split('/').map(parseFloat);

        var _segment$cueOutCont$s2 = slicedToArray(_segment$cueOutCont$s, 2);

        adOffset = _segment$cueOutCont$s2[0];
        adTotal = _segment$cueOutCont$s2[1];

        // Backdate the cue so adStartTime reflects the actual ad start.
        cue = new window$1.VTTCue(mediaTime, mediaTime + segment.duration, '');
        cue.adStartTime = mediaTime - adOffset;
        cue.adEndTime = cue.adStartTime + adTotal;
        track.addCue(cue);
      }
    }
    mediaTime += segment.duration;
  }
};
13813
/**
 * @file sync-controller.js
 */

// Alias for mux.js's ts-inspector inspect(): probes mpeg2-ts bytes for timing info.
var tsprobe = tsInspector.inspect;
13819
// Each strategy produces a sync-point ({time, segmentIndex}) for a playlist,
// or null when it does not apply. They are evaluated in order by
// SyncController#runStrategies_.
var syncPointStrategies = [
// Strategy "VOD": Handle the VOD-case where the sync-point is *always*
// the equivalence display-time 0 === segment-index 0
{
  name: 'VOD',
  run: function run(syncController, playlist, mediaDuration, currentTimeline, currentTime) {
    // A finite duration means VOD, where display time 0 is segment 0.
    if (mediaDuration !== Infinity) {
      return {
        time: 0,
        segmentIndex: 0
      };
    }

    return null;
  }
},
// Strategy "ProgramDateTime": We have a program-date-time tag in this playlist
{
  name: 'ProgramDateTime',
  run: function run(syncController, playlist, mediaDuration, currentTimeline, currentTime) {
    if (!syncController.datetimeToDisplayTime) {
      return null;
    }

    var segments = playlist.segments || [];
    var syncPoint = null;
    var lastDistance = null;

    currentTime = currentTime || 0;

    for (var i = 0; i < segments.length; i++) {
      var segment = segments[i];

      if (!segment.dateTimeObject) {
        continue;
      }

      var segmentTime = segment.dateTimeObject.getTime() / 1000;
      var segmentStart = segmentTime + syncController.datetimeToDisplayTime;
      var distance = Math.abs(currentTime - segmentStart);

      // Once the distance begins to increase, or if distance is 0, we have passed
      // currentTime and can stop looking for better candidates
      if (lastDistance !== null && (distance === 0 || lastDistance < distance)) {
        break;
      }

      lastDistance = distance;
      syncPoint = {
        time: segmentStart,
        segmentIndex: i
      };
    }

    return syncPoint;
  }
},
// Strategy "Segment": We have a known time mapping for a timeline and a
// segment in the current timeline with timing data
{
  name: 'Segment',
  run: function run(syncController, playlist, mediaDuration, currentTimeline, currentTime) {
    var segments = playlist.segments || [];
    var syncPoint = null;
    var lastDistance = null;

    currentTime = currentTime || 0;

    for (var i = 0; i < segments.length; i++) {
      var segment = segments[i];
      var usable = segment.timeline === currentTimeline && typeof segment.start !== 'undefined';

      if (!usable) {
        continue;
      }

      var distance = Math.abs(currentTime - segment.start);

      // Once the distance begins to increase, we have passed
      // currentTime and can stop looking for better candidates
      if (lastDistance !== null && lastDistance < distance) {
        break;
      }

      if (!syncPoint || lastDistance === null || lastDistance >= distance) {
        lastDistance = distance;
        syncPoint = {
          time: segment.start,
          segmentIndex: i
        };
      }
    }

    return syncPoint;
  }
},
// Strategy "Discontinuity": We have a discontinuity with a known
// display-time
{
  name: 'Discontinuity',
  run: function run(syncController, playlist, mediaDuration, currentTimeline, currentTime) {
    var syncPoint = null;

    currentTime = currentTime || 0;

    if (!playlist.discontinuityStarts || !playlist.discontinuityStarts.length) {
      return syncPoint;
    }

    var lastDistance = null;

    for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
      var segmentIndex = playlist.discontinuityStarts[i];
      var discontinuity = playlist.discontinuitySequence + i + 1;
      var discontinuitySync = syncController.discontinuities[discontinuity];

      if (!discontinuitySync) {
        continue;
      }

      var distance = Math.abs(currentTime - discontinuitySync.time);

      // Once the distance begins to increase, we have passed
      // currentTime and can stop looking for better candidates
      if (lastDistance !== null && lastDistance < distance) {
        break;
      }

      if (!syncPoint || lastDistance === null || lastDistance >= distance) {
        lastDistance = distance;
        syncPoint = {
          time: discontinuitySync.time,
          segmentIndex: segmentIndex
        };
      }
    }

    return syncPoint;
  }
},
// Strategy "Playlist": We have a playlist with a known mapping of
// segment index to display time
{
  name: 'Playlist',
  run: function run(syncController, playlist, mediaDuration, currentTimeline, currentTime) {
    if (!playlist.syncInfo) {
      return null;
    }

    // syncInfo ties a media sequence number to a display time; translate
    // it into an index into the current playlist.
    return {
      time: playlist.syncInfo.time,
      segmentIndex: playlist.syncInfo.mediaSequence - playlist.mediaSequence
    };
  }
}];
13965
13966var SyncController = function (_videojs$EventTarget) {
13967 inherits(SyncController, _videojs$EventTarget);
13968
13969 function SyncController() {
13970 classCallCheck(this, SyncController);
13971
13972 // Segment Loader state variables...
13973 // ...for synching across variants
13974 var _this = possibleConstructorReturn(this, (SyncController.__proto__ || Object.getPrototypeOf(SyncController)).call(this));
13975
13976 _this.inspectCache_ = undefined;
13977
13978 // ...for synching across variants
13979 _this.timelines = [];
13980 _this.discontinuities = [];
13981 _this.datetimeToDisplayTime = null;
13982
13983 _this.logger_ = logger('SyncController');
13984 return _this;
13985 }
13986
13987 /**
13988 * Find a sync-point for the playlist specified
13989 *
13990 * A sync-point is defined as a known mapping from display-time to
13991 * a segment-index in the current playlist.
13992 *
13993 * @param {Playlist} playlist
13994 * The playlist that needs a sync-point
13995 * @param {Number} duration
13996 * Duration of the MediaSource (Infinite if playing a live source)
13997 * @param {Number} currentTimeline
13998 * The last timeline from which a segment was loaded
13999 * @returns {Object}
14000 * A sync-point object
14001 */
14002
14003
14004 createClass(SyncController, [{
14005 key: 'getSyncPoint',
14006 value: function getSyncPoint(playlist, duration$$1, currentTimeline, currentTime) {
14007 var syncPoints = this.runStrategies_(playlist, duration$$1, currentTimeline, currentTime);
14008
14009 if (!syncPoints.length) {
14010 // Signal that we need to attempt to get a sync-point manually
14011 // by fetching a segment in the playlist and constructing
14012 // a sync-point from that information
14013 return null;
14014 }
14015
14016 // Now find the sync-point that is closest to the currentTime because
14017 // that should result in the most accurate guess about which segment
14018 // to fetch
14019 return this.selectSyncPoint_(syncPoints, { key: 'time', value: currentTime });
14020 }
14021
14022 /**
14023 * Calculate the amount of time that has expired off the playlist during playback
14024 *
14025 * @param {Playlist} playlist
14026 * Playlist object to calculate expired from
14027 * @param {Number} duration
14028 * Duration of the MediaSource (Infinity if playling a live source)
14029 * @returns {Number|null}
14030 * The amount of time that has expired off the playlist during playback. Null
14031 * if no sync-points for the playlist can be found.
14032 */
14033
14034 }, {
14035 key: 'getExpiredTime',
14036 value: function getExpiredTime(playlist, duration$$1) {
14037 if (!playlist || !playlist.segments) {
14038 return null;
14039 }
14040
14041 var syncPoints = this.runStrategies_(playlist, duration$$1, playlist.discontinuitySequence, 0);
14042
14043 // Without sync-points, there is not enough information to determine the expired time
14044 if (!syncPoints.length) {
14045 return null;
14046 }
14047
14048 var syncPoint = this.selectSyncPoint_(syncPoints, {
14049 key: 'segmentIndex',
14050 value: 0
14051 });
14052
14053 // If the sync-point is beyond the start of the playlist, we want to subtract the
14054 // duration from index 0 to syncPoint.segmentIndex instead of adding.
14055 if (syncPoint.segmentIndex > 0) {
14056 syncPoint.time *= -1;
14057 }
14058
14059 return Math.abs(syncPoint.time + sumDurations(playlist, syncPoint.segmentIndex, 0));
14060 }
14061
14062 /**
14063 * Runs each sync-point strategy and returns a list of sync-points returned by the
14064 * strategies
14065 *
14066 * @private
14067 * @param {Playlist} playlist
14068 * The playlist that needs a sync-point
14069 * @param {Number} duration
14070 * Duration of the MediaSource (Infinity if playing a live source)
14071 * @param {Number} currentTimeline
14072 * The last timeline from which a segment was loaded
14073 * @returns {Array}
14074 * A list of sync-point objects
14075 */
14076
14077 }, {
14078 key: 'runStrategies_',
14079 value: function runStrategies_(playlist, duration$$1, currentTimeline, currentTime) {
14080 var syncPoints = [];
14081
14082 // Try to find a sync-point in by utilizing various strategies...
14083 for (var i = 0; i < syncPointStrategies.length; i++) {
14084 var strategy = syncPointStrategies[i];
14085 var syncPoint = strategy.run(this, playlist, duration$$1, currentTimeline, currentTime);
14086
14087 if (syncPoint) {
14088 syncPoint.strategy = strategy.name;
14089 syncPoints.push({
14090 strategy: strategy.name,
14091 syncPoint: syncPoint
14092 });
14093 }
14094 }
14095
14096 return syncPoints;
14097 }
14098
14099 /**
14100 * Selects the sync-point nearest the specified target
14101 *
14102 * @private
14103 * @param {Array} syncPoints
14104 * List of sync-points to select from
14105 * @param {Object} target
14106 * Object specifying the property and value we are targeting
14107 * @param {String} target.key
14108 * Specifies the property to target. Must be either 'time' or 'segmentIndex'
14109 * @param {Number} target.value
14110 * The value to target for the specified key.
14111 * @returns {Object}
14112 * The sync-point nearest the target
14113 */
14114
14115 }, {
14116 key: 'selectSyncPoint_',
14117 value: function selectSyncPoint_(syncPoints, target) {
14118 var bestSyncPoint = syncPoints[0].syncPoint;
14119 var bestDistance = Math.abs(syncPoints[0].syncPoint[target.key] - target.value);
14120 var bestStrategy = syncPoints[0].strategy;
14121
14122 for (var i = 1; i < syncPoints.length; i++) {
14123 var newDistance = Math.abs(syncPoints[i].syncPoint[target.key] - target.value);
14124
14125 if (newDistance < bestDistance) {
14126 bestDistance = newDistance;
14127 bestSyncPoint = syncPoints[i].syncPoint;
14128 bestStrategy = syncPoints[i].strategy;
14129 }
14130 }
14131
14132 this.logger_('syncPoint for [' + target.key + ': ' + target.value + '] chosen with strategy' + (' [' + bestStrategy + ']: [time:' + bestSyncPoint.time + ',') + (' segmentIndex:' + bestSyncPoint.segmentIndex + ']'));
14133
14134 return bestSyncPoint;
14135 }
14136
14137 /**
14138 * Save any meta-data present on the segments when segments leave
14139 * the live window to the playlist to allow for synchronization at the
14140 * playlist level later.
14141 *
14142 * @param {Playlist} oldPlaylist - The previous active playlist
14143 * @param {Playlist} newPlaylist - The updated and most current playlist
14144 */
14145
14146 }, {
14147 key: 'saveExpiredSegmentInfo',
14148 value: function saveExpiredSegmentInfo(oldPlaylist, newPlaylist) {
14149 var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;
14150
14151 // When a segment expires from the playlist and it has a start time
14152 // save that information as a possible sync-point reference in future
14153 for (var i = mediaSequenceDiff - 1; i >= 0; i--) {
14154 var lastRemovedSegment = oldPlaylist.segments[i];
14155
14156 if (lastRemovedSegment && typeof lastRemovedSegment.start !== 'undefined') {
14157 newPlaylist.syncInfo = {
14158 mediaSequence: oldPlaylist.mediaSequence + i,
14159 time: lastRemovedSegment.start
14160 };
14161 this.logger_('playlist refresh sync: [time:' + newPlaylist.syncInfo.time + ',' + (' mediaSequence: ' + newPlaylist.syncInfo.mediaSequence + ']'));
14162 this.trigger('syncinfoupdate');
14163 break;
14164 }
14165 }
14166 }
14167
14168 /**
14169 * Save the mapping from playlist's ProgramDateTime to display. This should
14170 * only ever happen once at the start of playback.
14171 *
14172 * @param {Playlist} playlist - The currently active playlist
14173 */
14174
14175 }, {
14176 key: 'setDateTimeMapping',
14177 value: function setDateTimeMapping(playlist) {
14178 if (!this.datetimeToDisplayTime && playlist.segments && playlist.segments.length && playlist.segments[0].dateTimeObject) {
14179 var playlistTimestamp = playlist.segments[0].dateTimeObject.getTime() / 1000;
14180
14181 this.datetimeToDisplayTime = -playlistTimestamp;
14182 }
14183 }
14184
14185 /**
14186 * Reset the state of the inspection cache when we do a rendition
14187 * switch
14188 */
14189
14190 }, {
14191 key: 'reset',
14192 value: function reset() {
14193 this.inspectCache_ = undefined;
14194 }
14195
14196 /**
14197 * Probe or inspect a fmp4 or an mpeg2-ts segment to determine the start
14198 * and end of the segment in it's internal "media time". Used to generate
14199 * mappings from that internal "media time" to the display time that is
14200 * shown on the player.
14201 *
14202 * @param {SegmentInfo} segmentInfo - The current active request information
14203 */
14204
14205 }, {
14206 key: 'probeSegmentInfo',
14207 value: function probeSegmentInfo(segmentInfo) {
14208 var segment = segmentInfo.segment;
14209 var playlist = segmentInfo.playlist;
14210 var timingInfo = void 0;
14211
14212 if (segment.map) {
14213 timingInfo = this.probeMp4Segment_(segmentInfo);
14214 } else {
14215 timingInfo = this.probeTsSegment_(segmentInfo);
14216 }
14217
14218 if (timingInfo) {
14219 if (this.calculateSegmentTimeMapping_(segmentInfo, timingInfo)) {
14220 this.saveDiscontinuitySyncInfo_(segmentInfo);
14221
14222 // If the playlist does not have sync information yet, record that information
14223 // now with segment timing information
14224 if (!playlist.syncInfo) {
14225 playlist.syncInfo = {
14226 mediaSequence: playlist.mediaSequence + segmentInfo.mediaIndex,
14227 time: segment.start
14228 };
14229 }
14230 }
14231 }
14232
14233 return timingInfo;
14234 }
14235
14236 /**
14237 * Probe an fmp4 segment to determine the start of the segment
14238 * in it's internal "composition time", which is equal to the base
14239 * media decode time plus the composition time offset value
14240 *
14241 * @private
14242 * @param {SegmentInfo} segmentInfo - The current active request information
14243 * @return {object} The start and end time of the current segment in "composition time"
14244 */
14245
14246 }, {
14247 key: 'probeMp4Segment_',
14248 value: function probeMp4Segment_(segmentInfo) {
14249 var segment = segmentInfo.segment;
14250 // get timescales from init segment
14251 var timescales = mp4probe.timescale(segment.map.bytes);
14252 // calculate composition start time using the timescales and information
14253 // contained within the media segment
14254 var compositionStartTime = mp4probe.compositionStartTime(timescales, segmentInfo.bytes);
14255
14256 if (segmentInfo.timestampOffset !== null) {
14257 segmentInfo.timestampOffset -= compositionStartTime;
14258 }
14259
14260 return {
14261 start: compositionStartTime,
14262 end: compositionStartTime + segment.duration
14263 };
14264 }
14265
14266 /**
14267 * Probe an mpeg2-ts segment to determine the start and end of the segment
14268 * in it's internal "media time".
14269 *
14270 * @private
14271 * @param {SegmentInfo} segmentInfo - The current active request information
14272 * @return {object} The start and end time of the current segment in "media time"
14273 */
14274
14275 }, {
14276 key: 'probeTsSegment_',
14277 value: function probeTsSegment_(segmentInfo) {
14278 var timeInfo = tsprobe(segmentInfo.bytes, this.inspectCache_);
14279 var segmentStartTime = void 0;
14280 var segmentEndTime = void 0;
14281 var segmentTimestampInfo = void 0;
14282
14283 if (!timeInfo) {
14284 return null;
14285 }
14286
14287 if (timeInfo.video && timeInfo.video.length === 2) {
14288 this.inspectCache_ = timeInfo.video[1].dts;
14289 segmentStartTime = timeInfo.video[0].dtsTime;
14290 segmentEndTime = timeInfo.video[1].dtsTime;
14291 segmentTimestampInfo = timeInfo.video;
14292 } else if (timeInfo.audio && timeInfo.audio.length === 2) {
14293 this.inspectCache_ = timeInfo.audio[1].dts;
14294 segmentStartTime = timeInfo.audio[0].dtsTime;
14295 segmentEndTime = timeInfo.audio[1].dtsTime;
14296 segmentTimestampInfo = timeInfo.audio;
14297 }
14298
14299 var probedInfo = {
14300 segmentTimestampInfo: segmentTimestampInfo,
14301 start: segmentStartTime,
14302 end: segmentEndTime,
14303 containsVideo: timeInfo.video && timeInfo.video.length === 2,
14304 containsAudio: timeInfo.audio && timeInfo.audio.length === 2
14305 };
14306
14307 return probedInfo;
14308 }
14309 }, {
14310 key: 'timestampOffsetForTimeline',
14311 value: function timestampOffsetForTimeline(timeline) {
14312 if (typeof this.timelines[timeline] === 'undefined') {
14313 return null;
14314 }
14315 return this.timelines[timeline].time;
14316 }
14317 }, {
14318 key: 'mappingForTimeline',
14319 value: function mappingForTimeline(timeline) {
14320 if (typeof this.timelines[timeline] === 'undefined') {
14321 return null;
14322 }
14323 return this.timelines[timeline].mapping;
14324 }
14325
14326 /**
14327 * Use the "media time" for a segment to generate a mapping to "display time" and
14328 * save that display time to the segment.
14329 *
14330 * @private
14331 * @param {SegmentInfo} segmentInfo
14332 * The current active request information
14333 * @param {object} timingInfo
14334 * The start and end time of the current segment in "media time"
14335 * @returns {Boolean}
14336 * Returns false if segment time mapping could not be calculated
14337 */
14338
14339 }, {
14340 key: 'calculateSegmentTimeMapping_',
14341 value: function calculateSegmentTimeMapping_(segmentInfo, timingInfo) {
14342 var segment = segmentInfo.segment;
14343 var mappingObj = this.timelines[segmentInfo.timeline];
14344
14345 if (segmentInfo.timestampOffset !== null) {
14346 mappingObj = {
14347 time: segmentInfo.startOfSegment,
14348 mapping: segmentInfo.startOfSegment - timingInfo.start
14349 };
14350 this.timelines[segmentInfo.timeline] = mappingObj;
14351 this.trigger('timestampoffset');
14352
14353 this.logger_('time mapping for timeline ' + segmentInfo.timeline + ': ' + ('[time: ' + mappingObj.time + '] [mapping: ' + mappingObj.mapping + ']'));
14354
14355 segment.start = segmentInfo.startOfSegment;
14356 segment.end = timingInfo.end + mappingObj.mapping;
14357 } else if (mappingObj) {
14358 segment.start = timingInfo.start + mappingObj.mapping;
14359 segment.end = timingInfo.end + mappingObj.mapping;
14360 } else {
14361 return false;
14362 }
14363
14364 return true;
14365 }
14366
14367 /**
14368 * Each time we have discontinuity in the playlist, attempt to calculate the location
14369 * in display of the start of the discontinuity and save that. We also save an accuracy
14370 * value so that we save values with the most accuracy (closest to 0.)
14371 *
14372 * @private
14373 * @param {SegmentInfo} segmentInfo - The current active request information
14374 */
14375
  }, {
    key: 'saveDiscontinuitySyncInfo_',
    value: function saveDiscontinuitySyncInfo_(segmentInfo) {
      var playlist = segmentInfo.playlist;
      var segment = segmentInfo.segment;

      // If the current segment is a discontinuity then we know exactly where
      // the range starts, and its accuracy is 0 (greater accuracy values
      // mean more approximation)
      if (segment.discontinuity) {
        this.discontinuities[segment.timeline] = {
          time: segment.start,
          accuracy: 0
        };
      } else if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
        // Search for future discontinuities that we can provide better timing
        // information for and save that information for sync purposes
        for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
          var segmentIndex = playlist.discontinuityStarts[i];
          // timeline number of the i-th discontinuity in this playlist
          var discontinuity = playlist.discontinuitySequence + i + 1;
          var mediaIndexDiff = segmentIndex - segmentInfo.mediaIndex;
          // distance (in segments) between the segment we have timing info
          // for and the discontinuity; lower means a better estimate
          var accuracy = Math.abs(mediaIndexDiff);

          // only overwrite a previously saved sync point when this estimate
          // is at least as close to the discontinuity
          if (!this.discontinuities[discontinuity] || this.discontinuities[discontinuity].accuracy > accuracy) {
            var time = void 0;

            if (mediaIndexDiff < 0) {
              // discontinuity precedes the current segment: walk backwards
              // by the summed durations of the intervening segments
              time = segment.start - sumDurations(playlist, segmentInfo.mediaIndex, segmentIndex);
            } else {
              // discontinuity follows the current segment: walk forwards
              time = segment.end + sumDurations(playlist, segmentInfo.mediaIndex + 1, segmentIndex);
            }

            this.discontinuities[discontinuity] = {
              time: time,
              accuracy: accuracy
            };
          }
        }
      }
    }
  }, {
    key: 'dispose',
    value: function dispose() {
      // notify listeners that this controller is going away, then remove
      // all event handlers registered on this instance
      this.trigger('dispose');
      this.off();
    }
  }]);
  return SyncController;
}(videojs.EventTarget);
14425
14426var Decrypter$1 = new shimWorker("./decrypter-worker.worker.js", function (window, document$$1) {
14427 var self = this;
14428 var decrypterWorker = function () {
14429
14430 /*
14431 * pkcs7.pad
14432 * https://github.com/brightcove/pkcs7
14433 *
14434 * Copyright (c) 2014 Brightcove
14435 * Licensed under the apache2 license.
14436 */
14437
14438 /**
14439 * Returns the subarray of a Uint8Array without PKCS#7 padding.
14440 * @param padded {Uint8Array} unencrypted bytes that have been padded
14441 * @return {Uint8Array} the unpadded bytes
14442 * @see http://tools.ietf.org/html/rfc5652
14443 */
14444
14445 function unpad(padded) {
14446 return padded.subarray(0, padded.byteLength - padded[padded.byteLength - 1]);
14447 }
14448
14449 var classCallCheck = function classCallCheck(instance, Constructor) {
14450 if (!(instance instanceof Constructor)) {
14451 throw new TypeError("Cannot call a class as a function");
14452 }
14453 };
14454
14455 var createClass = function () {
14456 function defineProperties(target, props) {
14457 for (var i = 0; i < props.length; i++) {
14458 var descriptor = props[i];
14459 descriptor.enumerable = descriptor.enumerable || false;
14460 descriptor.configurable = true;
14461 if ("value" in descriptor) descriptor.writable = true;
14462 Object.defineProperty(target, descriptor.key, descriptor);
14463 }
14464 }
14465
14466 return function (Constructor, protoProps, staticProps) {
14467 if (protoProps) defineProperties(Constructor.prototype, protoProps);
14468 if (staticProps) defineProperties(Constructor, staticProps);
14469 return Constructor;
14470 };
14471 }();
14472
14473 var inherits = function inherits(subClass, superClass) {
14474 if (typeof superClass !== "function" && superClass !== null) {
14475 throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);
14476 }
14477
14478 subClass.prototype = Object.create(superClass && superClass.prototype, {
14479 constructor: {
14480 value: subClass,
14481 enumerable: false,
14482 writable: true,
14483 configurable: true
14484 }
14485 });
14486 if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass;
14487 };
14488
14489 var possibleConstructorReturn = function possibleConstructorReturn(self, call) {
14490 if (!self) {
14491 throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
14492 }
14493
14494 return call && (typeof call === "object" || typeof call === "function") ? call : self;
14495 };
14496
14497 /**
14498 * @file aes.js
14499 *
14500 * This file contains an adaptation of the AES decryption algorithm
14501 * from the Standford Javascript Cryptography Library. That work is
14502 * covered by the following copyright and permissions notice:
14503 *
14504 * Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
14505 * All rights reserved.
14506 *
14507 * Redistribution and use in source and binary forms, with or without
14508 * modification, are permitted provided that the following conditions are
14509 * met:
14510 *
14511 * 1. Redistributions of source code must retain the above copyright
14512 * notice, this list of conditions and the following disclaimer.
14513 *
14514 * 2. Redistributions in binary form must reproduce the above
14515 * copyright notice, this list of conditions and the following
14516 * disclaimer in the documentation and/or other materials provided
14517 * with the distribution.
14518 *
14519 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
14520 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
14521 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
14522 * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE
14523 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
14524 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
14525 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
14526 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
14527 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
14528 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
14529 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
14530 *
14531 * The views and conclusions contained in the software and documentation
14532 * are those of the authors and should not be interpreted as representing
14533 * official policies, either expressed or implied, of the authors.
14534 */
14535
  /**
   * Expand the AES S-box tables: the S-box, its inverse, and the four
   * MixColumns-combined lookup tables for each direction. Computed once on
   * the client so the tables don't have to be shipped down the wire.
   *
   * @private
   * @return {Array} [encTables, decTables]; index 4 of each is the (inverse) S-box
   */
  var precompute = function precompute() {
    var tables = [[[], [], [], [], []], [[], [], [], [], []]];
    var encTable = tables[0];
    var decTable = tables[1];
    var sbox = encTable[4];
    var sboxInv = decTable[4];
    var i = void 0;
    var x = void 0;
    var xInv = void 0;
    var d = [];
    var th = [];
    var x2 = void 0;
    var x4 = void 0;
    var x8 = void 0;
    var s = void 0;
    var tEnc = void 0;
    var tDec = void 0;

    // Compute double and third tables (GF(2^8) doubling, reduced by 0x11b)
    for (i = 0; i < 256; i++) {
      th[(d[i] = i << 1 ^ (i >> 7) * 283) ^ i] = i;
    }

    // Walk x through the field by a generator while xInv walks its inverse
    for (x = xInv = 0; !sbox[x]; x ^= x2 || 1, xInv = th[xInv] || 1) {
      // Compute sbox
      s = xInv ^ xInv << 1 ^ xInv << 2 ^ xInv << 3 ^ xInv << 4;
      s = s >> 8 ^ s & 255 ^ 99;
      sbox[x] = s;
      sboxInv[s] = x;

      // Compute MixColumns
      x8 = d[x4 = d[x2 = d[x]]];
      tDec = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
      tEnc = d[s] * 0x101 ^ s * 0x1010100;

      for (i = 0; i < 4; i++) {
        // each subsequent table is the previous one rotated by one byte
        encTable[i][x] = tEnc = tEnc << 24 ^ tEnc >>> 8;
        decTable[i][s] = tDec = tDec << 24 ^ tDec >>> 8;
      }
    }

    // Compactify. Considerable speedup on Firefox.
    for (i = 0; i < 5; i++) {
      encTable[i] = encTable[i].slice(0);
      decTable[i] = decTable[i].slice(0);
    }
    return tables;
  };
  // shared, lazily-initialized table cache (populated by the AES constructor)
  var aesTables = null;
14590
14591 /**
14592 * Schedule out an AES key for both encryption and decryption. This
14593 * is a low-level class. Use a cipher mode to do bulk encryption.
14594 *
14595 * @class AES
14596 * @param key {Array} The key as an array of 4, 6 or 8 words.
14597 */
14598
  var AES = function () {
    function AES(key) {
      classCallCheck(this, AES);

      /**
       * The expanded S-box and inverse S-box tables. These will be computed
       * on the client so that we don't have to send them down the wire.
       *
       * There are two tables, _tables[0] is for encryption and
       * _tables[1] is for decryption.
       *
       * The first 4 sub-tables are the expanded S-box with MixColumns. The
       * last (_tables[01][4]) is the S-box itself.
       *
       * @private
       */
      // if we have yet to precompute the S-box tables
      // do so now
      if (!aesTables) {
        aesTables = precompute();
      }
      // then make a copy of that object for use
      this._tables = [[aesTables[0][0].slice(), aesTables[0][1].slice(), aesTables[0][2].slice(), aesTables[0][3].slice(), aesTables[0][4].slice()], [aesTables[1][0].slice(), aesTables[1][1].slice(), aesTables[1][2].slice(), aesTables[1][3].slice(), aesTables[1][4].slice()]];
      var i = void 0;
      var j = void 0;
      var tmp = void 0;
      var encKey = void 0;
      var decKey = void 0;
      var sbox = this._tables[0][4];
      var decTable = this._tables[1];
      var keyLen = key.length;
      var rcon = 1;

      // AES supports 128-, 192-, and 256-bit keys (4, 6, or 8 words)
      if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
        throw new Error('Invalid aes key size');
      }

      encKey = key.slice(0);
      decKey = [];
      this._key = [encKey, decKey];

      // schedule encryption keys (standard AES key expansion)
      for (i = keyLen; i < 4 * keyLen + 28; i++) {
        tmp = encKey[i - 1];

        // apply sbox
        if (i % keyLen === 0 || keyLen === 8 && i % keyLen === 4) {
          tmp = sbox[tmp >>> 24] << 24 ^ sbox[tmp >> 16 & 255] << 16 ^ sbox[tmp >> 8 & 255] << 8 ^ sbox[tmp & 255];

          // shift rows and add rcon
          if (i % keyLen === 0) {
            tmp = tmp << 8 ^ tmp >>> 24 ^ rcon << 24;
            // advance rcon by doubling in GF(2^8), reducing by 0x11b
            rcon = rcon << 1 ^ (rcon >> 7) * 283;
          }
        }

        encKey[i] = encKey[i - keyLen] ^ tmp;
      }

      // schedule decryption keys: the encryption schedule traversed
      // backwards, with the inverse MixColumns tables applied to the
      // middle rounds
      for (j = 0; i; j++, i--) {
        tmp = encKey[j & 3 ? i : i - 4];
        if (i <= 4 || j < 4) {
          decKey[j] = tmp;
        } else {
          decKey[j] = decTable[0][sbox[tmp >>> 24]] ^ decTable[1][sbox[tmp >> 16 & 255]] ^ decTable[2][sbox[tmp >> 8 & 255]] ^ decTable[3][sbox[tmp & 255]];
        }
      }
    }

    /**
     * Decrypt 16 bytes, specified as four 32-bit words.
     *
     * @param {Number} encrypted0 the first word to decrypt
     * @param {Number} encrypted1 the second word to decrypt
     * @param {Number} encrypted2 the third word to decrypt
     * @param {Number} encrypted3 the fourth word to decrypt
     * @param {Int32Array} out the array to write the decrypted words
     * into
     * @param {Number} offset the offset into the output array to start
     * writing results
     * @return {Array} The plaintext.
     */

    AES.prototype.decrypt = function decrypt$$1(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {
      var key = this._key[1];
      // state variables a,b,c,d are loaded with pre-whitened data
      // (note the b/d swap: words 1 and 3 are exchanged for the inverse cipher)
      var a = encrypted0 ^ key[0];
      var b = encrypted3 ^ key[1];
      var c = encrypted2 ^ key[2];
      var d = encrypted1 ^ key[3];
      var a2 = void 0;
      var b2 = void 0;
      var c2 = void 0;

      // key.length === 2 ?
      var nInnerRounds = key.length / 4 - 2;
      var i = void 0;
      var kIndex = 4;
      var table = this._tables[1];

      // load up the tables
      var table0 = table[0];
      var table1 = table[1];
      var table2 = table[2];
      var table3 = table[3];
      var sbox = table[4];

      // Inner rounds. Cribbed from OpenSSL.
      for (i = 0; i < nInnerRounds; i++) {
        a2 = table0[a >>> 24] ^ table1[b >> 16 & 255] ^ table2[c >> 8 & 255] ^ table3[d & 255] ^ key[kIndex];
        b2 = table0[b >>> 24] ^ table1[c >> 16 & 255] ^ table2[d >> 8 & 255] ^ table3[a & 255] ^ key[kIndex + 1];
        c2 = table0[c >>> 24] ^ table1[d >> 16 & 255] ^ table2[a >> 8 & 255] ^ table3[b & 255] ^ key[kIndex + 2];
        d = table0[d >>> 24] ^ table1[a >> 16 & 255] ^ table2[b >> 8 & 255] ^ table3[c & 255] ^ key[kIndex + 3];
        kIndex += 4;
        a = a2;b = b2;c = c2;
      }

      // Last round: plain S-box substitution (no MixColumns), with the
      // output words written in reversed order to undo the initial swap
      for (i = 0; i < 4; i++) {
        out[(3 & -i) + offset] = sbox[a >>> 24] << 24 ^ sbox[b >> 16 & 255] << 16 ^ sbox[c >> 8 & 255] << 8 ^ sbox[d & 255] ^ key[kIndex++];
        a2 = a;a = b;b = c;c = d;d = a2;
      }
    };

    return AES;
  }();
14726
14727 /**
14728 * @file stream.js
14729 */
14730 /**
14731 * A lightweight readable stream implemention that handles event dispatching.
14732 *
14733 * @class Stream
14734 */
14735 var Stream = function () {
14736 function Stream() {
14737 classCallCheck(this, Stream);
14738
14739 this.listeners = {};
14740 }
14741
14742 /**
14743 * Add a listener for a specified event type.
14744 *
14745 * @param {String} type the event name
14746 * @param {Function} listener the callback to be invoked when an event of
14747 * the specified type occurs
14748 */
14749
14750 Stream.prototype.on = function on(type, listener) {
14751 if (!this.listeners[type]) {
14752 this.listeners[type] = [];
14753 }
14754 this.listeners[type].push(listener);
14755 };
14756
14757 /**
14758 * Remove a listener for a specified event type.
14759 *
14760 * @param {String} type the event name
14761 * @param {Function} listener a function previously registered for this
14762 * type of event through `on`
14763 * @return {Boolean} if we could turn it off or not
14764 */
14765
14766 Stream.prototype.off = function off(type, listener) {
14767 if (!this.listeners[type]) {
14768 return false;
14769 }
14770
14771 var index = this.listeners[type].indexOf(listener);
14772
14773 this.listeners[type].splice(index, 1);
14774 return index > -1;
14775 };
14776
14777 /**
14778 * Trigger an event of the specified type on this stream. Any additional
14779 * arguments to this function are passed as parameters to event listeners.
14780 *
14781 * @param {String} type the event name
14782 */
14783
14784 Stream.prototype.trigger = function trigger(type) {
14785 var callbacks = this.listeners[type];
14786
14787 if (!callbacks) {
14788 return;
14789 }
14790
14791 // Slicing the arguments on every invocation of this method
14792 // can add a significant amount of overhead. Avoid the
14793 // intermediate object creation for the common case of a
14794 // single callback argument
14795 if (arguments.length === 2) {
14796 var length = callbacks.length;
14797
14798 for (var i = 0; i < length; ++i) {
14799 callbacks[i].call(this, arguments[1]);
14800 }
14801 } else {
14802 var args = Array.prototype.slice.call(arguments, 1);
14803 var _length = callbacks.length;
14804
14805 for (var _i = 0; _i < _length; ++_i) {
14806 callbacks[_i].apply(this, args);
14807 }
14808 }
14809 };
14810
14811 /**
14812 * Destroys the stream and cleans up.
14813 */
14814
14815 Stream.prototype.dispose = function dispose() {
14816 this.listeners = {};
14817 };
14818 /**
14819 * Forwards all `data` events on this stream to the destination stream. The
14820 * destination stream should provide a method `push` to receive the data
14821 * events as they arrive.
14822 *
14823 * @param {Stream} destination the stream that will receive all `data` events
14824 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
14825 */
14826
14827 Stream.prototype.pipe = function pipe(destination) {
14828 this.on('data', function (data) {
14829 destination.push(data);
14830 });
14831 };
14832
14833 return Stream;
14834 }();
14835
14836 /**
14837 * @file async-stream.js
14838 */
14839 /**
   * A wrapper around the Stream class to use setTimeout
14841 * and run stream "jobs" Asynchronously
14842 *
14843 * @class AsyncStream
14844 * @extends Stream
14845 */
14846
  var AsyncStream$$1 = function (_Stream) {
    inherits(AsyncStream$$1, _Stream);

    function AsyncStream$$1() {
      classCallCheck(this, AsyncStream$$1);

      // NOTE(review): `Stream` is passed as an argument to the parent
      // constructor here; Stream's constructor takes no parameters, so the
      // extra argument appears to be ignored at runtime — confirm upstream.
      var _this = possibleConstructorReturn(this, _Stream.call(this, Stream));

      // queue of zero-argument job functions, run one per timeout tick
      _this.jobs = [];
      // delay (ms) between processing consecutive jobs
      _this.delay = 1;
      // handle of the pending setTimeout, or null when idle
      _this.timeout_ = null;
      return _this;
    }

    /**
     * process an async job
     *
     * @private
     */

    AsyncStream$$1.prototype.processJob_ = function processJob_() {
      // run the oldest queued job, then schedule the next one (if any)
      this.jobs.shift()();
      if (this.jobs.length) {
        this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
      } else {
        this.timeout_ = null;
      }
    };

    /**
     * push a job into the stream
     *
     * @param {Function} job the job to push into the stream
     */

    AsyncStream$$1.prototype.push = function push(job) {
      this.jobs.push(job);
      // start the processing loop if it isn't already running
      if (!this.timeout_) {
        this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
      }
    };

    return AsyncStream$$1;
  }(Stream);
14891
14892 /**
14893 * @file decrypter.js
14894 *
14895 * An asynchronous implementation of AES-128 CBC decryption with
14896 * PKCS#7 padding.
14897 */
14898
14899 /**
14900 * Convert network-order (big-endian) bytes into their little-endian
14901 * representation.
14902 */
14903 var ntoh = function ntoh(word) {
14904 return word << 24 | (word & 0xff00) << 8 | (word & 0xff0000) >> 8 | word >>> 24;
14905 };
14906
14907 /**
14908 * Decrypt bytes using AES-128 with CBC and PKCS#7 padding.
14909 *
14910 * @param {Uint8Array} encrypted the encrypted bytes
14911 * @param {Uint32Array} key the bytes of the decryption key
14912 * @param {Uint32Array} initVector the initialization vector (IV) to
14913 * use for the first round of CBC.
14914 * @return {Uint8Array} the decrypted bytes
14915 *
14916 * @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
14917 * @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29
14918 * @see https://tools.ietf.org/html/rfc2315
14919 */
  var decrypt$$1 = function decrypt$$1(encrypted, key, initVector) {
    // word-level access to the encrypted bytes
    // NOTE(review): assumes encrypted.byteLength is a multiple of 16
    // (the AES block size) — trailing bytes past a word boundary are dropped
    var encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2);

    var decipher = new AES(Array.prototype.slice.call(key));

    // byte and word-level access for the decrypted output
    var decrypted = new Uint8Array(encrypted.byteLength);
    var decrypted32 = new Int32Array(decrypted.buffer);

    // temporary variables for working with the IV, encrypted, and
    // decrypted data
    var init0 = void 0;
    var init1 = void 0;
    var init2 = void 0;
    var init3 = void 0;
    var encrypted0 = void 0;
    var encrypted1 = void 0;
    var encrypted2 = void 0;
    var encrypted3 = void 0;

    // iteration variable
    var wordIx = void 0;

    // pull out the words of the IV to ensure we don't modify the
    // passed-in reference and easier access
    init0 = initVector[0];
    init1 = initVector[1];
    init2 = initVector[2];
    init3 = initVector[3];

    // decrypt four word sequences, applying cipher-block chaining (CBC)
    // to each decrypted block
    for (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {
      // convert big-endian (network order) words into little-endian
      // (javascript order)
      encrypted0 = ntoh(encrypted32[wordIx]);
      encrypted1 = ntoh(encrypted32[wordIx + 1]);
      encrypted2 = ntoh(encrypted32[wordIx + 2]);
      encrypted3 = ntoh(encrypted32[wordIx + 3]);

      // decrypt the block
      decipher.decrypt(encrypted0, encrypted1, encrypted2, encrypted3, decrypted32, wordIx);

      // XOR with the IV, and restore network byte-order to obtain the
      // plaintext
      decrypted32[wordIx] = ntoh(decrypted32[wordIx] ^ init0);
      decrypted32[wordIx + 1] = ntoh(decrypted32[wordIx + 1] ^ init1);
      decrypted32[wordIx + 2] = ntoh(decrypted32[wordIx + 2] ^ init2);
      decrypted32[wordIx + 3] = ntoh(decrypted32[wordIx + 3] ^ init3);

      // setup the IV for the next round: CBC chains on the *ciphertext*
      init0 = encrypted0;
      init1 = encrypted1;
      init2 = encrypted2;
      init3 = encrypted3;
    }

    return decrypted;
  };
14980
14981 /**
14982 * The `Decrypter` class that manages decryption of AES
14983 * data through `AsyncStream` objects and the `decrypt`
14984 * function
14985 *
14986 * @param {Uint8Array} encrypted the encrypted bytes
14987 * @param {Uint32Array} key the bytes of the decryption key
14988 * @param {Uint32Array} initVector the initialization vector (IV) to
14989 * @param {Function} done the function to run when done
14990 * @class Decrypter
14991 */
14992
  var Decrypter$$1 = function () {
    function Decrypter$$1(encrypted, key, initVector, done) {
      classCallCheck(this, Decrypter$$1);

      var step = Decrypter$$1.STEP;
      // NOTE(review): indexes the whole underlying buffer from offset 0 —
      // assumes callers pass a view that starts at byteOffset 0; confirm
      var encrypted32 = new Int32Array(encrypted.buffer);
      var decrypted = new Uint8Array(encrypted.byteLength);
      var i = 0;

      this.asyncStream_ = new AsyncStream$$1();

      // split up the encryption job and do the individual chunks asynchronously
      this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
      for (i = step; i < encrypted32.length; i += step) {
        // CBC chains across chunks: the IV for each subsequent chunk is the
        // last ciphertext block (4 words) of the previous chunk
        initVector = new Uint32Array([ntoh(encrypted32[i - 4]), ntoh(encrypted32[i - 3]), ntoh(encrypted32[i - 2]), ntoh(encrypted32[i - 1])]);
        this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
      }
      // invoke the done() callback when everything is finished
      this.asyncStream_.push(function () {
        // remove pkcs#7 padding from the decrypted bytes
        done(null, unpad(decrypted));
      });
    }

    /**
     * a getter for step the maximum number of bytes to process at one time
     *
     * NOTE(review): STEP is used to index an Int32Array above, so each chunk
     * is actually STEP 32-bit words (4 * STEP bytes) — confirm intent.
     *
     * @return {Number} the value of step 32000
     */

    /**
     * Decrypt a single chunk and write the plaintext into the shared output.
     *
     * @private
     */
    Decrypter$$1.prototype.decryptChunk_ = function decryptChunk_(encrypted, key, initVector, decrypted) {
      return function () {
        var bytes = decrypt$$1(encrypted, key, initVector);

        // place this chunk's plaintext at the chunk's own byte offset
        decrypted.set(bytes, encrypted.byteOffset);
      };
    };

    createClass(Decrypter$$1, null, [{
      key: 'STEP',
      get: function get$$1() {
        // 4 * 8000;
        return 32000;
      }
    }]);
    return Decrypter$$1;
  }();
15043
15044 /**
15045 * @file bin-utils.js
15046 */
15047
15048 /**
15049 * Creates an object for sending to a web worker modifying properties that are TypedArrays
15050 * into a new object with seperated properties for the buffer, byteOffset, and byteLength.
15051 *
15052 * @param {Object} message
15053 * Object of properties and values to send to the web worker
15054 * @return {Object}
15055 * Modified message with TypedArray values expanded
15056 * @function createTransferableMessage
15057 */
15058 var createTransferableMessage = function createTransferableMessage(message) {
15059 var transferable = {};
15060
15061 Object.keys(message).forEach(function (key) {
15062 var value = message[key];
15063
15064 if (ArrayBuffer.isView(value)) {
15065 transferable[key] = {
15066 bytes: value.buffer,
15067 byteOffset: value.byteOffset,
15068 byteLength: value.byteLength
15069 };
15070 } else {
15071 transferable[key] = value;
15072 }
15073 });
15074
15075 return transferable;
15076 };
15077
15078 /**
15079 * Our web worker interface so that things can talk to aes-decrypter
15080 * that will be running in a web worker. the scope is passed to this by
15081 * webworkify.
15082 *
15083 * @param {Object} self
15084 * the scope for the web worker
15085 */
15086 var DecrypterWorker = function DecrypterWorker(self) {
15087 self.onmessage = function (event) {
15088 var data = event.data;
15089 var encrypted = new Uint8Array(data.encrypted.bytes, data.encrypted.byteOffset, data.encrypted.byteLength);
15090 var key = new Uint32Array(data.key.bytes, data.key.byteOffset, data.key.byteLength / 4);
15091 var iv = new Uint32Array(data.iv.bytes, data.iv.byteOffset, data.iv.byteLength / 4);
15092
15093 /* eslint-disable no-new, handle-callback-err */
15094 new Decrypter$$1(encrypted, key, iv, function (err, bytes) {
15095 self.postMessage(createTransferableMessage({
15096 source: data.source,
15097 decrypted: bytes
15098 }), [bytes.buffer]);
15099 });
15100 /* eslint-enable */
15101 };
15102 };
15103
15104 var decrypterWorker = new DecrypterWorker(self);
15105
15106 return decrypterWorker;
15107 }();
15108});
15109
15110/**
15111 * Convert the properties of an HLS track into an audioTrackKind.
15112 *
15113 * @private
15114 */
/**
 * Convert the properties of an HLS track into an audioTrackKind.
 *
 * Default tracks map to 'main' and alternates to 'alternative'; any track
 * whose characteristics include the Apple describes-video accessibility
 * tag maps to 'main-desc' regardless of its default flag.
 *
 * @param {Object} properties parsed attributes of the HLS audio track
 * @return {String} the kind for an AudioTrack
 * @private
 */
var audioTrackKind_ = function audioTrackKind_(properties) {
  var describesVideo = properties.characteristics && properties.characteristics.indexOf('public.accessibility.describes-video') >= 0;

  if (describesVideo) {
    return 'main-desc';
  }

  return properties.default ? 'main' : 'alternative';
};
15124
15125/**
15126 * Pause provided segment loader and playlist loader if active
15127 *
15128 * @param {SegmentLoader} segmentLoader
15129 * SegmentLoader to pause
15130 * @param {Object} mediaType
15131 * Active media type
15132 * @function stopLoaders
15133 */
/**
 * Pause provided segment loader and playlist loader if active
 *
 * @param {SegmentLoader} segmentLoader
 *        SegmentLoader to pause
 * @param {Object} mediaType
 *        Active media type
 * @function stopLoaders
 */
var stopLoaders = function stopLoaders(segmentLoader, mediaType) {
  // cancel any in-flight segment request before pausing the loader
  segmentLoader.abort();
  segmentLoader.pause();

  var activeLoader = mediaType && mediaType.activePlaylistLoader;

  if (activeLoader) {
    activeLoader.pause();
    mediaType.activePlaylistLoader = null;
  }
};
15143
15144/**
15145 * Start loading provided segment loader and playlist loader
15146 *
15147 * @param {PlaylistLoader} playlistLoader
15148 * PlaylistLoader to start loading
15149 * @param {Object} mediaType
15150 * Active media type
15151 * @function startLoaders
15152 */
/**
 * Start loading provided segment loader and playlist loader
 *
 * @param {PlaylistLoader} playlistLoader
 *        PlaylistLoader to start loading
 * @param {Object} mediaType
 *        Active media type
 * @function startLoaders
 */
var startLoaders = function startLoaders(playlistLoader, mediaType) {
  mediaType.activePlaylistLoader = playlistLoader;

  // kick off the playlist loader; the segment loader is started later,
  // after this loader fires `loadedmetadata` or `loadedplaylist`
  playlistLoader.load();
};
15159
15160/**
15161 * Returns a function to be called when the media group changes. It performs a
15162 * non-destructive (preserve the buffer) resync of the SegmentLoader. This is because a
15163 * change of group is merely a rendition switch of the same content at another encoding,
15164 * rather than a change of content, such as switching audio from English to Spanish.
15165 *
15166 * @param {String} type
15167 * MediaGroup type
15168 * @param {Object} settings
15169 * Object containing required information for media groups
15170 * @return {Function}
15171 * Handler for a non-destructive resync of SegmentLoader when the active media
15172 * group changes.
15173 * @function onGroupChanged
15174 */
/**
 * Returns a function to be called when the media group changes. It performs a
 * non-destructive (preserve the buffer) resync of the SegmentLoader. This is
 * because a change of group is merely a rendition switch of the same content at
 * another encoding, rather than a change of content, such as switching audio
 * from English to Spanish.
 *
 * @param {String} type
 *        MediaGroup type
 * @param {Object} settings
 *        Object containing required information for media groups
 * @return {Function}
 *         Handler for a non-destructive resync of SegmentLoader when the
 *         active media group changes.
 * @function onGroupChanged
 */
var onGroupChanged = function onGroupChanged(type, settings) {
  return function () {
    var segmentLoaders = settings.segmentLoaders;
    var segmentLoader = segmentLoaders[type];
    var mainSegmentLoader = segmentLoaders.main;
    var mediaType = settings.mediaTypes[type];

    var activeGroup = mediaType.activeGroup(mediaType.activeTrack());
    var previousActiveLoader = mediaType.activePlaylistLoader;

    stopLoaders(segmentLoader, mediaType);

    if (!activeGroup) {
      // there is no group active
      return;
    }

    if (!activeGroup.playlistLoader) {
      // The previous group had a playlist loader but the new active group does
      // not: we are switching from demuxed to muxed audio. In this case we do a
      // destructive reset of the main segment loader and do not restart the
      // audio loaders.
      if (previousActiveLoader) {
        mainSegmentLoader.resetEverything();
      }
      return;
    }

    // Non-destructive resync
    segmentLoader.resyncLoader();

    startLoaders(activeGroup.playlistLoader, mediaType);
  };
};
15210
15211/**
15212 * Returns a function to be called when the media track changes. It performs a
15213 * destructive reset of the SegmentLoader to ensure we start loading as close to
15214 * currentTime as possible.
15215 *
15216 * @param {String} type
15217 * MediaGroup type
15218 * @param {Object} settings
15219 * Object containing required information for media groups
15220 * @return {Function}
15221 * Handler for a destructive reset of SegmentLoader when the active media
15222 * track changes.
15223 * @function onTrackChanged
15224 */
/**
 * Returns a function to be called when the media track changes. It performs a
 * destructive reset of the SegmentLoader to ensure we start loading as close to
 * currentTime as possible.
 *
 * @param {String} type
 *        MediaGroup type
 * @param {Object} settings
 *        Object containing required information for media groups
 * @return {Function}
 *         Handler for a destructive reset of SegmentLoader when the active
 *         media track changes.
 * @function onTrackChanged
 */
var onTrackChanged = function onTrackChanged(type, settings) {
  return function () {
    var loaders = settings.segmentLoaders;
    var segmentLoader = loaders[type];
    var mainSegmentLoader = loaders.main;
    var mediaType = settings.mediaTypes[type];

    var activeTrack = mediaType.activeTrack();
    var activeGroup = mediaType.activeGroup(activeTrack);
    var previousActiveLoader = mediaType.activePlaylistLoader;

    stopLoaders(segmentLoader, mediaType);

    if (!activeGroup) {
      // there is no group active so we do not want to restart loaders
      return;
    }

    if (!activeGroup.playlistLoader) {
      // when switching from demuxed audio/video to muxed audio/video (noted by
      // no playlist loader for the audio group), we want to do a destructive
      // reset of the main segment loader and not restart the audio loaders
      mainSegmentLoader.resetEverything();
      return;
    }

    if (previousActiveLoader === activeGroup.playlistLoader) {
      // Nothing has actually changed. This can happen because track change
      // events can fire multiple times for a "single" change. One for enabling
      // the new active track, and one for disabling the track that was active
      startLoaders(activeGroup.playlistLoader, mediaType);
      return;
    }

    // For WebVTT, set the new text track in the segmentloader
    if (segmentLoader.track) {
      segmentLoader.track(activeTrack);
    }

    // destructive reset
    segmentLoader.resetEverything();

    startLoaders(activeGroup.playlistLoader, mediaType);
  };
};
15270
var onError = {
  /**
   * Returns a function to be called when a SegmentLoader or PlaylistLoader
   * encounters an error.
   *
   * @param {String} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @return {Function}
   *         Error handler. Logs warning (or error if the playlist is
   *         blacklisted) to console and switches back to default audio track.
   * @function onError.AUDIO
   */
  AUDIO: function AUDIO(type, settings) {
    return function () {
      var segmentLoader = settings.segmentLoaders[type];
      var mediaType = settings.mediaTypes[type];
      var blacklistCurrentPlaylist = settings.blacklistCurrentPlaylist;

      stopLoaders(segmentLoader, mediaType);

      // locate the default track of the active group so we can switch back
      var activeTrack = mediaType.activeTrack();
      var activeGroup = mediaType.activeGroup();
      var defaultGroup = activeGroup.filter(function (group) {
        return group.default;
      })[0] || activeGroup[0];
      var defaultTrack = mediaType.tracks[defaultGroup.id];

      if (activeTrack === defaultTrack) {
        // Default track encountered an error. All we can do now is blacklist the
        // current rendition and hope another will switch audio groups
        blacklistCurrentPlaylist({
          message: 'Problem encountered loading the default audio track.'
        });
        return;
      }

      videojs.log.warn('Problem encountered loading the alternate audio track.' + 'Switching back to default.');

      // re-enable only the default track
      for (var trackId in mediaType.tracks) {
        mediaType.tracks[trackId].enabled = mediaType.tracks[trackId] === defaultTrack;
      }

      mediaType.onTrackChanged();
    };
  },
  /**
   * Returns a function to be called when a SegmentLoader or PlaylistLoader
   * encounters an error.
   *
   * @param {String} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @return {Function}
   *         Error handler. Logs warning to console and disables the active
   *         subtitle track
   * @function onError.SUBTITLES
   */
  SUBTITLES: function SUBTITLES(type, settings) {
    return function () {
      var segmentLoader = settings.segmentLoaders[type];
      var mediaType = settings.mediaTypes[type];

      videojs.log.warn('Problem encountered loading the subtitle track.' + 'Disabling subtitle track.');

      stopLoaders(segmentLoader, mediaType);

      var track = mediaType.activeTrack();

      if (track) {
        track.mode = 'disabled';
      }

      mediaType.onTrackChanged();
    };
  }
};
15352
var setupListeners = {
  /**
   * Setup event listeners for audio playlist loader
   *
   * @param {String} type
   *        MediaGroup type
   * @param {PlaylistLoader|null} playlistLoader
   *        PlaylistLoader to register listeners on
   * @param {Object} settings
   *        Object containing required information for media groups
   * @function setupListeners.AUDIO
   */
  AUDIO: function AUDIO(type, playlistLoader, settings) {
    if (!playlistLoader) {
      // no playlist loader means audio will be muxed with the video
      return;
    }

    var tech = settings.tech;
    var requestOptions = settings.requestOptions;
    var segmentLoader = settings.segmentLoaders[type];

    playlistLoader.on('loadedmetadata', function () {
      var media = playlistLoader.media();

      segmentLoader.playlist(media, requestOptions);

      // start downloading segments right away if playback is underway, or for
      // a non-live video whose preload setting permits it
      var shouldLoad = !tech.paused();

      if (!shouldLoad && media.endList) {
        shouldLoad = tech.preload() !== 'none';
      }

      if (shouldLoad) {
        segmentLoader.load();
      }
    });

    playlistLoader.on('loadedplaylist', function () {
      segmentLoader.playlist(playlistLoader.media(), requestOptions);

      // keep the segment loader running whenever the player is not paused
      if (!tech.paused()) {
        segmentLoader.load();
      }
    });

    playlistLoader.on('error', onError[type](type, settings));
  },
  /**
   * Setup event listeners for subtitle playlist loader
   *
   * @param {String} type
   *        MediaGroup type
   * @param {PlaylistLoader|null} playlistLoader
   *        PlaylistLoader to register listeners on
   * @param {Object} settings
   *        Object containing required information for media groups
   * @function setupListeners.SUBTITLES
   */
  SUBTITLES: function SUBTITLES(type, playlistLoader, settings) {
    var tech = settings.tech;
    var requestOptions = settings.requestOptions;
    var segmentLoader = settings.segmentLoaders[type];
    var mediaType = settings.mediaTypes[type];

    playlistLoader.on('loadedmetadata', function () {
      var media = playlistLoader.media();

      segmentLoader.playlist(media, requestOptions);
      segmentLoader.track(mediaType.activeTrack());

      // start downloading segments right away if playback is underway, or for
      // a non-live video whose preload setting permits it
      var shouldLoad = !tech.paused();

      if (!shouldLoad && media.endList) {
        shouldLoad = tech.preload() !== 'none';
      }

      if (shouldLoad) {
        segmentLoader.load();
      }
    });

    playlistLoader.on('loadedplaylist', function () {
      segmentLoader.playlist(playlistLoader.media(), requestOptions);

      // keep the segment loader running whenever the player is not paused
      if (!tech.paused()) {
        segmentLoader.load();
      }
    });

    playlistLoader.on('error', onError[type](type, settings));
  }
};
15442
var initialize = {
  /**
   * Setup PlaylistLoaders and AudioTracks for the audio groups
   *
   * @param {String} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @function initialize.AUDIO
   */
  'AUDIO': function AUDIO(type, settings) {
    var hls = settings.hls,
        sourceType = settings.sourceType,
        segmentLoader = settings.segmentLoaders[type],
        requestOptions = settings.requestOptions,
        mediaGroups = settings.master.mediaGroups,
        _settings$mediaTypes$ = settings.mediaTypes[type],
        groups = _settings$mediaTypes$.groups,
        tracks = _settings$mediaTypes$.tracks,
        masterPlaylistLoader = settings.masterPlaylistLoader;

    // force a default if we have none
    // (a synthetic "main" group with a single default variant, so the rest of
    // the audio media-group machinery always has something to operate on)

    if (!mediaGroups[type] || Object.keys(mediaGroups[type]).length === 0) {
      mediaGroups[type] = { main: { default: { default: true } } };
    }

    for (var groupId in mediaGroups[type]) {
      if (!groups[groupId]) {
        groups[groupId] = [];
      }

      // List of playlists that have an AUDIO attribute value matching the current
      // group ID

      for (var variantLabel in mediaGroups[type][groupId]) {
        var properties = mediaGroups[type][groupId][variantLabel];
        var playlistLoader = void 0;

        // pick a loader for this rendition: its own HLS media playlist when a
        // resolvedUri is present, a DASH playlist loader for dash sources, or
        // no loader at all when the audio is muxed into the video playlist
        if (properties.resolvedUri) {
          playlistLoader = new PlaylistLoader(properties.resolvedUri, hls, requestOptions);
        } else if (properties.playlists && sourceType === 'dash') {
          playlistLoader = new DashPlaylistLoader(properties.playlists[0], hls, requestOptions, masterPlaylistLoader);
        } else {
          // no resolvedUri means the audio is muxed with the video when using this
          // audio track
          playlistLoader = null;
        }

        // tag the variant with its label and loader so downstream code can use
        // the merged properties object interchangeably
        properties = videojs.mergeOptions({ id: variantLabel, playlistLoader: playlistLoader }, properties);

        setupListeners[type](type, properties.playlistLoader, settings);

        groups[groupId].push(properties);

        // create at most one AudioTrack per label, even if the same label
        // appears in multiple groups
        if (typeof tracks[variantLabel] === 'undefined') {
          var track = new videojs.AudioTrack({
            id: variantLabel,
            kind: audioTrackKind_(properties),
            enabled: false,
            language: properties.language,
            default: properties.default,
            label: variantLabel
          });

          tracks[variantLabel] = track;
        }
      }
    }

    // setup single error event handler for the segment loader
    segmentLoader.on('error', onError[type](type, settings));
  },
  /**
   * Setup PlaylistLoaders and TextTracks for the subtitle groups
   *
   * @param {String} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @function initialize.SUBTITLES
   */
  'SUBTITLES': function SUBTITLES(type, settings) {
    var tech = settings.tech,
        hls = settings.hls,
        sourceType = settings.sourceType,
        segmentLoader = settings.segmentLoaders[type],
        requestOptions = settings.requestOptions,
        mediaGroups = settings.master.mediaGroups,
        _settings$mediaTypes$2 = settings.mediaTypes[type],
        groups = _settings$mediaTypes$2.groups,
        tracks = _settings$mediaTypes$2.tracks,
        masterPlaylistLoader = settings.masterPlaylistLoader;


    for (var groupId in mediaGroups[type]) {
      if (!groups[groupId]) {
        groups[groupId] = [];
      }

      for (var variantLabel in mediaGroups[type][groupId]) {
        if (mediaGroups[type][groupId][variantLabel].forced) {
          // Subtitle playlists with the forced attribute are not selectable in Safari.
          // According to Apple's HLS Authoring Specification:
          //   If content has forced subtitles and regular subtitles in a given language,
          //   the regular subtitles track in that language MUST contain both the forced
          //   subtitles and the regular subtitles for that language.
          // Because of this requirement and that Safari does not add forced subtitles,
          // forced subtitles are skipped here to maintain consistent experience across
          // all platforms
          continue;
        }

        var properties = mediaGroups[type][groupId][variantLabel];

        // NOTE(review): for source types other than 'hls'/'dash' no loader is
        // created and playlistLoader stays undefined
        var playlistLoader = void 0;

        if (sourceType === 'hls') {
          playlistLoader = new PlaylistLoader(properties.resolvedUri, hls, requestOptions);
        } else if (sourceType === 'dash') {
          playlistLoader = new DashPlaylistLoader(properties.playlists[0], hls, requestOptions, masterPlaylistLoader);
        }

        properties = videojs.mergeOptions({
          id: variantLabel,
          playlistLoader: playlistLoader
        }, properties);

        setupListeners[type](type, properties.playlistLoader, settings);

        groups[groupId].push(properties);

        // create at most one remote TextTrack per label
        if (typeof tracks[variantLabel] === 'undefined') {
          var track = tech.addRemoteTextTrack({
            id: variantLabel,
            kind: 'subtitles',
            default: properties.default && properties.autoselect,
            language: properties.language,
            label: variantLabel
          }, false).track;

          tracks[variantLabel] = track;
        }
      }
    }

    // setup single error event handler for the segment loader
    segmentLoader.on('error', onError[type](type, settings));
  },
  /**
   * Setup TextTracks for the closed-caption groups
   *
   * @param {String} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @function initialize['CLOSED-CAPTIONS']
   */
  'CLOSED-CAPTIONS': function CLOSEDCAPTIONS(type, settings) {
    var tech = settings.tech,
        mediaGroups = settings.master.mediaGroups,
        _settings$mediaTypes$3 = settings.mediaTypes[type],
        groups = _settings$mediaTypes$3.groups,
        tracks = _settings$mediaTypes$3.tracks;


    for (var groupId in mediaGroups[type]) {
      if (!groups[groupId]) {
        groups[groupId] = [];
      }

      for (var variantLabel in mediaGroups[type][groupId]) {
        var properties = mediaGroups[type][groupId][variantLabel];

        // We only support CEA608 captions for now, so ignore anything that
        // doesn't use a CCx INSTREAM-ID
        if (!properties.instreamId.match(/CC\d/)) {
          continue;
        }

        // No PlaylistLoader is required for Closed-Captions because the captions are
        // embedded within the video stream
        groups[groupId].push(videojs.mergeOptions({ id: variantLabel }, properties));

        // create at most one remote captions TextTrack per label; the track id
        // is the INSTREAM-ID (e.g. CC1) rather than the label
        if (typeof tracks[variantLabel] === 'undefined') {
          var track = tech.addRemoteTextTrack({
            id: properties.instreamId,
            kind: 'captions',
            default: properties.default && properties.autoselect,
            language: properties.language,
            label: variantLabel
          }, false).track;

          tracks[variantLabel] = track;
        }
      }
    }
  }
};
15642
15643/**
15644 * Returns a function used to get the active group of the provided type
15645 *
15646 * @param {String} type
15647 * MediaGroup type
15648 * @param {Object} settings
15649 * Object containing required information for media groups
15650 * @return {Function}
15651 * Function that returns the active media group for the provided type. Takes an
15652 * optional parameter {TextTrack} track. If no track is provided, a list of all
15653 * variants in the group, otherwise the variant corresponding to the provided
15654 * track is returned.
15655 * @function activeGroup
15656 */
15657var activeGroup = function activeGroup(type, settings) {
15658 return function (track) {
15659 var masterPlaylistLoader = settings.masterPlaylistLoader,
15660 groups = settings.mediaTypes[type].groups;
15661
15662
15663 var media = masterPlaylistLoader.media();
15664
15665 if (!media) {
15666 return null;
15667 }
15668
15669 var variants = null;
15670
15671 if (media.attributes[type]) {
15672 variants = groups[media.attributes[type]];
15673 }
15674
15675 variants = variants || groups.main;
15676
15677 if (typeof track === 'undefined') {
15678 return variants;
15679 }
15680
15681 if (track === null) {
15682 // An active track was specified so a corresponding group is expected. track === null
15683 // means no track is currently active so there is no corresponding group
15684 return null;
15685 }
15686
15687 return variants.filter(function (props) {
15688 return props.id === track.id;
15689 })[0] || null;
15690 };
15691};
15692
var activeTrack = {
  /**
   * Returns a function used to get the active track of type provided
   *
   * @param {String} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @return {Function}
   *         Function that returns the active media track for the provided type. Returns
   *         null if no track is active
   * @function activeTrack.AUDIO
   */
  AUDIO: function AUDIO(type, settings) {
    return function () {
      var tracks = settings.mediaTypes[type].tracks;
      var trackIds = Object.keys(tracks);

      // the active audio track is the first one flagged enabled
      for (var i = 0; i < trackIds.length; i++) {
        var candidate = tracks[trackIds[i]];

        if (candidate.enabled) {
          return candidate;
        }
      }

      return null;
    };
  },
  /**
   * Returns a function used to get the active track of type provided
   *
   * @param {String} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @return {Function}
   *         Function that returns the active media track for the provided type. Returns
   *         null if no track is active
   * @function activeTrack.SUBTITLES
   */
  SUBTITLES: function SUBTITLES(type, settings) {
    return function () {
      var tracks = settings.mediaTypes[type].tracks;
      var trackIds = Object.keys(tracks);

      // a subtitle track is active when it is showing or hidden (hidden tracks
      // still receive cues, they just are not rendered)
      for (var i = 0; i < trackIds.length; i++) {
        var candidate = tracks[trackIds[i]];

        if (candidate.mode === 'showing' || candidate.mode === 'hidden') {
          return candidate;
        }
      }

      return null;
    };
  }
};
15747
15748/**
15749 * Setup PlaylistLoaders and Tracks for media groups (Audio, Subtitles,
15750 * Closed-Captions) specified in the master manifest.
15751 *
15752 * @param {Object} settings
15753 * Object containing required information for setting up the media groups
15754 * @param {SegmentLoader} settings.segmentLoaders.AUDIO
15755 * Audio segment loader
15756 * @param {SegmentLoader} settings.segmentLoaders.SUBTITLES
15757 * Subtitle segment loader
15758 * @param {SegmentLoader} settings.segmentLoaders.main
15759 * Main segment loader
15760 * @param {Tech} settings.tech
15761 * The tech of the player
15762 * @param {Object} settings.requestOptions
15763 * XHR request options used by the segment loaders
15764 * @param {PlaylistLoader} settings.masterPlaylistLoader
15765 * PlaylistLoader for the master source
15766 * @param {HlsHandler} settings.hls
15767 * HLS SourceHandler
15768 * @param {Object} settings.master
15769 * The parsed master manifest
15770 * @param {Object} settings.mediaTypes
15771 * Object to store the loaders, tracks, and utility methods for each media type
15772 * @param {Function} settings.blacklistCurrentPlaylist
15773 * Blacklists the current rendition and forces a rendition switch.
15774 * @function setupMediaGroups
15775 */
15776var setupMediaGroups = function setupMediaGroups(settings) {
15777 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
15778 initialize[type](type, settings);
15779 });
15780
15781 var mediaTypes = settings.mediaTypes,
15782 masterPlaylistLoader = settings.masterPlaylistLoader,
15783 tech = settings.tech,
15784 hls = settings.hls;
15785
15786 // setup active group and track getters and change event handlers
15787
15788 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
15789 mediaTypes[type].activeGroup = activeGroup(type, settings);
15790 mediaTypes[type].activeTrack = activeTrack[type](type, settings);
15791 mediaTypes[type].onGroupChanged = onGroupChanged(type, settings);
15792 mediaTypes[type].onTrackChanged = onTrackChanged(type, settings);
15793 });
15794
15795 // DO NOT enable the default subtitle or caption track.
15796 // DO enable the default audio track
15797 var audioGroup = mediaTypes.AUDIO.activeGroup();
15798 var groupId = (audioGroup.filter(function (group) {
15799 return group.default;
15800 })[0] || audioGroup[0]).id;
15801
15802 mediaTypes.AUDIO.tracks[groupId].enabled = true;
15803 mediaTypes.AUDIO.onTrackChanged();
15804
15805 masterPlaylistLoader.on('mediachange', function () {
15806 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
15807 return mediaTypes[type].onGroupChanged();
15808 });
15809 });
15810
15811 // custom audio track change event handler for usage event
15812 var onAudioTrackChanged = function onAudioTrackChanged() {
15813 mediaTypes.AUDIO.onTrackChanged();
15814 tech.trigger({ type: 'usage', name: 'hls-audio-change' });
15815 };
15816
15817 tech.audioTracks().addEventListener('change', onAudioTrackChanged);
15818 tech.remoteTextTracks().addEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
15819
15820 hls.on('dispose', function () {
15821 tech.audioTracks().removeEventListener('change', onAudioTrackChanged);
15822 tech.remoteTextTracks().removeEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
15823 });
15824
15825 // clear existing audio tracks and add the ones we just created
15826 tech.clearTracks('audio');
15827
15828 for (var id in mediaTypes.AUDIO.tracks) {
15829 tech.audioTracks().addTrack(mediaTypes.AUDIO.tracks[id]);
15830 }
15831};
15832
15833/**
15834 * Creates skeleton object used to store the loaders, tracks, and utility methods for each
15835 * media type
15836 *
15837 * @return {Object}
15838 * Object to store the loaders, tracks, and utility methods for each media type
15839 * @function createMediaTypes
15840 */
15841var createMediaTypes = function createMediaTypes() {
15842 var mediaTypes = {};
15843
15844 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
15845 mediaTypes[type] = {
15846 groups: {},
15847 tracks: {},
15848 activePlaylistLoader: null,
15849 activeGroup: noop,
15850 activeTrack: noop,
15851 onGroupChanged: noop,
15852 onTrackChanged: noop
15853 };
15854 });
15855
15856 return mediaTypes;
15857};
15858
15859/**
15860 * @file master-playlist-controller.js
15861 */
15862
15863var ABORT_EARLY_BLACKLIST_SECONDS = 60 * 2;
15864
15865var Hls = void 0;
15866
15867// SegmentLoader stats that need to have each loader's
15868// values summed to calculate the final value
15869var loaderStats = ['mediaRequests', 'mediaRequestsAborted', 'mediaRequestsTimedout', 'mediaRequestsErrored', 'mediaTransferDuration', 'mediaBytesTransferred'];
15870var sumLoaderStat = function sumLoaderStat(stat) {
15871 return this.audioSegmentLoader_[stat] + this.mainSegmentLoader_[stat];
15872};
/**
 * Determines whether playback should switch from the current playlist to a
 * newly selected playlist.
 *
 * @param {Object} _ref
 * @param {Object} _ref.currentPlaylist
 *        The playlist currently being played
 * @param {Object} _ref.nextPlaylist
 *        The playlist selected to switch to
 * @param {Number} _ref.forwardBuffer
 *        Seconds of buffered content ahead of the playhead
 * @param {Number} _ref.bufferLowWaterLine
 *        Minimum forward buffer required before switching up in bandwidth
 * @param {Number} _ref.duration
 *        Duration of the content in seconds
 * @return {Boolean}
 *         true when playback should move to nextPlaylist
 */
var shouldSwitchToMedia = function shouldSwitchToMedia(_ref) {
  // NOTE: the previously-destructured `log` parameter was never used and has
  // been removed; callers may still pass it harmlessly.
  var currentPlaylist = _ref.currentPlaylist,
      nextPlaylist = _ref.nextPlaylist,
      forwardBuffer = _ref.forwardBuffer,
      bufferLowWaterLine = _ref.bufferLowWaterLine,
      duration$$1 = _ref.duration;

  // we have no other playlist to switch to
  if (!nextPlaylist) {
    videojs.log.warn('We received no playlist to switch to. Please check your stream.');
    return false;
  }

  // If the playlist is live, then we want to not take low water line into account.
  // This is because in LIVE, the player plays 3 segments from the end of the
  // playlist, and if `BUFFER_LOW_WATER_LINE` is greater than the duration available
  // in those segments, a viewer will never experience a rendition upswitch.
  if (!currentPlaylist.endList) {
    return true;
  }

  // For the same reason as LIVE, we ignore the low water line when the VOD
  // duration is below the max potential low water line
  if (duration$$1 < Config.MAX_BUFFER_LOW_WATER_LINE) {
    return true;
  }

  // we want to switch down to lower resolutions quickly to continue playback, but
  if (nextPlaylist.attributes.BANDWIDTH < currentPlaylist.attributes.BANDWIDTH) {
    return true;
  }

  // ensure we have some buffer before we switch up to prevent us running out of
  // buffer while loading a higher rendition.
  if (forwardBuffer >= bufferLowWaterLine) {
    return true;
  }

  return false;
};
15914
15915/**
15916 * the master playlist controller controller all interactons
15917 * between playlists and segmentloaders. At this time this mainly
15918 * involves a master playlist and a series of audio playlists
15919 * if they are available
15920 *
15921 * @class MasterPlaylistController
15922 * @extends videojs.EventTarget
15923 */
15924var MasterPlaylistController = function (_videojs$EventTarget) {
15925 inherits(MasterPlaylistController, _videojs$EventTarget);
15926
  /**
   * Builds the controller: wires up the master playlist loader, the three
   * segment loaders (main, audio, vtt), the sync controller and decrypter,
   * then kicks off the initial master playlist request.
   *
   * @param {Object} options
   * @param {String} options.url - master playlist URL (required; throws if empty)
   * @param {Tech} options.tech - the player tech
   * @param {Object} options.externHls - the external Hls object, captured into
   *        the module-level `Hls` binding
   */
  function MasterPlaylistController(options) {
    classCallCheck(this, MasterPlaylistController);

    var _this = possibleConstructorReturn(this, (MasterPlaylistController.__proto__ || Object.getPrototypeOf(MasterPlaylistController)).call(this));

    var url = options.url,
        handleManifestRedirects = options.handleManifestRedirects,
        withCredentials = options.withCredentials,
        tech = options.tech,
        bandwidth = options.bandwidth,
        externHls = options.externHls,
        useCueTags = options.useCueTags,
        blacklistDuration = options.blacklistDuration,
        enableLowInitialPlaylist = options.enableLowInitialPlaylist,
        cacheEncryptionKeys = options.cacheEncryptionKeys,
        sourceType = options.sourceType;


    if (!url) {
      throw new Error('A non-empty playlist URL is required');
    }

    // stash the externally provided Hls object at module level so helpers
    // (e.g. triggerPresenceUsage_) can reach it
    Hls = externHls;

    _this.withCredentials = withCredentials;
    _this.tech_ = tech;
    _this.hls_ = tech.hls;
    _this.sourceType_ = sourceType;
    _this.useCueTags_ = useCueTags;
    _this.blacklistDuration = blacklistDuration;
    _this.enableLowInitialPlaylist = enableLowInitialPlaylist;
    // when cue tags are enabled, ad cues from the playlist are surfaced on a
    // dedicated metadata text track
    if (_this.useCueTags_) {
      _this.cueTagsTrack_ = _this.tech_.addTextTrack('metadata', 'ad-cues');
      _this.cueTagsTrack_.inBandMetadataTrackDispatchType = '';
    }

    // shared XHR options; timeout is filled in once a media playlist's target
    // duration is known (see setupMasterPlaylistLoaderListeners_)
    _this.requestOptions_ = {
      withCredentials: withCredentials,
      handleManifestRedirects: handleManifestRedirects,
      timeout: null
    };

    _this.mediaTypes_ = createMediaTypes();

    _this.mediaSource = new videojs.MediaSource();

    // load the media source into the player
    _this.mediaSource.addEventListener('sourceopen', _this.handleSourceOpen_.bind(_this));

    _this.seekable_ = videojs.createTimeRanges();
    _this.hasPlayed_ = false;

    _this.syncController_ = new SyncController(options);
    // hidden metadata track used to expose per-segment timing information
    _this.segmentMetadataTrack_ = tech.addRemoteTextTrack({
      kind: 'metadata',
      label: 'segment-metadata'
    }, false).track;

    _this.decrypter_ = new Decrypter$1();
    _this.inbandTextTracks_ = {};

    // settings shared by all three segment loaders; functions are used so the
    // loaders always see the controller's current state
    var segmentLoaderSettings = {
      hls: _this.hls_,
      mediaSource: _this.mediaSource,
      currentTime: _this.tech_.currentTime.bind(_this.tech_),
      seekable: function seekable$$1() {
        return _this.seekable();
      },
      seeking: function seeking() {
        return _this.tech_.seeking();
      },
      duration: function duration$$1() {
        return _this.mediaSource.duration;
      },
      hasPlayed: function hasPlayed() {
        return _this.hasPlayed_;
      },
      goalBufferLength: function goalBufferLength() {
        return _this.goalBufferLength();
      },
      bandwidth: bandwidth,
      syncController: _this.syncController_,
      decrypter: _this.decrypter_,
      sourceType: _this.sourceType_,
      inbandTextTracks: _this.inbandTextTracks_,
      cacheEncryptionKeys: cacheEncryptionKeys
    };

    // DASH sources get a DashPlaylistLoader; everything else uses the HLS
    // PlaylistLoader
    _this.masterPlaylistLoader_ = _this.sourceType_ === 'dash' ? new DashPlaylistLoader(url, _this.hls_, _this.requestOptions_) : new PlaylistLoader(url, _this.hls_, _this.requestOptions_);
    _this.setupMasterPlaylistLoaderListeners_();

    // setup segment loaders
    // combined audio/video or just video when alternate audio track is selected
    _this.mainSegmentLoader_ = new SegmentLoader(videojs.mergeOptions(segmentLoaderSettings, {
      segmentMetadataTrack: _this.segmentMetadataTrack_,
      loaderType: 'main'
    }), options);

    // alternate audio track
    _this.audioSegmentLoader_ = new SegmentLoader(videojs.mergeOptions(segmentLoaderSettings, {
      loaderType: 'audio'
    }), options);

    _this.subtitleSegmentLoader_ = new VTTSegmentLoader(videojs.mergeOptions(segmentLoaderSettings, {
      loaderType: 'vtt',
      featuresNativeTextTracks: _this.tech_.featuresNativeTextTracks
    }), options);

    _this.setupSegmentLoaderListeners_();

    // Create SegmentLoader stat-getters that sum the audio and main loader
    // values (e.g. this.mediaRequests_())
    loaderStats.forEach(function (stat) {
      _this[stat + '_'] = sumLoaderStat.bind(_this, stat);
    });

    _this.logger_ = logger('MPC');

    // kick off the initial master playlist request
    _this.masterPlaylistLoader_.load();
    return _this;
  }
16047
16048 /**
16049 * Register event handlers on the master playlist loader. A helper
16050 * function for construction time.
16051 *
16052 * @private
16053 */
16054
16055
16056 createClass(MasterPlaylistController, [{
16057 key: 'setupMasterPlaylistLoaderListeners_',
    value: function setupMasterPlaylistLoaderListeners_() {
      var _this2 = this;

      this.masterPlaylistLoader_.on('loadedmetadata', function () {
        var media = _this2.masterPlaylistLoader_.media();
        // request timeout of 1.5x the target duration, in milliseconds
        var requestTimeout = media.targetDuration * 1.5 * 1000;

        // If we don't have any more available playlists, we don't want to
        // timeout the request.
        if (isLowestEnabledRendition(_this2.masterPlaylistLoader_.master, _this2.masterPlaylistLoader_.media())) {
          _this2.requestOptions_.timeout = 0;
        } else {
          _this2.requestOptions_.timeout = requestTimeout;
        }

        // if this isn't a live video and preload permits, start
        // downloading segments
        if (media.endList && _this2.tech_.preload() !== 'none') {
          _this2.mainSegmentLoader_.playlist(media, _this2.requestOptions_);
          _this2.mainSegmentLoader_.load();
        }

        // build loaders/tracks for the alternate audio, subtitle, and
        // closed-caption media groups declared in the master
        setupMediaGroups({
          sourceType: _this2.sourceType_,
          segmentLoaders: {
            AUDIO: _this2.audioSegmentLoader_,
            SUBTITLES: _this2.subtitleSegmentLoader_,
            main: _this2.mainSegmentLoader_
          },
          tech: _this2.tech_,
          requestOptions: _this2.requestOptions_,
          masterPlaylistLoader: _this2.masterPlaylistLoader_,
          hls: _this2.hls_,
          master: _this2.master(),
          mediaTypes: _this2.mediaTypes_,
          blacklistCurrentPlaylist: _this2.blacklistCurrentPlaylist.bind(_this2)
        });

        // fire one-time usage events describing this source
        _this2.triggerPresenceUsage_(_this2.master(), media);

        try {
          _this2.setupSourceBuffers_();
        } catch (e) {
          // failing to create SourceBuffers is unrecoverable; end the stream
          videojs.log.warn('Failed to create SourceBuffers', e);
          return _this2.mediaSource.endOfStream('decode');
        }
        _this2.setupFirstPlay();

        if (!_this2.mediaTypes_.AUDIO.activePlaylistLoader || _this2.mediaTypes_.AUDIO.activePlaylistLoader.media()) {
          _this2.trigger('selectedinitialmedia');
        } else {
          // We must wait for the active audio playlist loader to
          // finish setting up before triggering this event so the
          // representations API and EME setup is correct
          _this2.mediaTypes_.AUDIO.activePlaylistLoader.one('loadedmetadata', function () {
            _this2.trigger('selectedinitialmedia');
          });
        }
      });

      this.masterPlaylistLoader_.on('loadedplaylist', function () {
        var updatedPlaylist = _this2.masterPlaylistLoader_.media();

        // no media playlist selected yet: pick an initial rendition
        if (!updatedPlaylist) {
          // blacklist any variants that are not supported by the browser before selecting
          // an initial media as the playlist selectors do not consider browser support
          _this2.excludeUnsupportedVariants_();

          var selectedMedia = void 0;

          if (_this2.enableLowInitialPlaylist) {
            selectedMedia = _this2.selectInitialPlaylist();
          }

          if (!selectedMedia) {
            selectedMedia = _this2.selectPlaylist();
          }

          _this2.initialMedia_ = selectedMedia;
          _this2.masterPlaylistLoader_.media(_this2.initialMedia_);
          return;
        }

        if (_this2.useCueTags_) {
          _this2.updateAdCues_(updatedPlaylist);
        }

        // TODO: Create a new event on the PlaylistLoader that signals
        // that the segments have changed in some way and use that to
        // update the SegmentLoader instead of doing it twice here and
        // on `mediachange`
        _this2.mainSegmentLoader_.playlist(updatedPlaylist, _this2.requestOptions_);
        _this2.updateDuration();

        // If the player isn't paused, ensure that the segment loader is running,
        // as it is possible that it was temporarily stopped while waiting for
        // a playlist (e.g., in case the playlist errored and we re-requested it).
        if (!_this2.tech_.paused()) {
          _this2.mainSegmentLoader_.load();
          if (_this2.audioSegmentLoader_) {
            _this2.audioSegmentLoader_.load();
          }
        }

        // for live playlists, report the seekable window to the media source
        // once the duration has become Infinity
        if (!updatedPlaylist.endList) {
          var addSeekableRange = function addSeekableRange() {
            var seekable$$1 = _this2.seekable();

            if (seekable$$1.length !== 0) {
              _this2.mediaSource.addSeekableRange_(seekable$$1.start(0), seekable$$1.end(0));
            }
          };

          if (_this2.duration() !== Infinity) {
            // keep re-registering on durationchange until the duration reports
            // Infinity, then add the seekable range
            var onDurationchange = function onDurationchange() {
              if (_this2.duration() === Infinity) {
                addSeekableRange();
              } else {
                _this2.tech_.one('durationchange', onDurationchange);
              }
            };

            _this2.tech_.one('durationchange', onDurationchange);
          } else {
            addSeekableRange();
          }
        }
      });

      this.masterPlaylistLoader_.on('error', function () {
        _this2.blacklistCurrentPlaylist(_this2.masterPlaylistLoader_.error);
      });

      // abort in-flight segment work while the rendition is switching
      this.masterPlaylistLoader_.on('mediachanging', function () {
        _this2.mainSegmentLoader_.abort();
        _this2.mainSegmentLoader_.pause();
      });

      this.masterPlaylistLoader_.on('mediachange', function () {
        var media = _this2.masterPlaylistLoader_.media();
        // request timeout of 1.5x the target duration, in milliseconds
        var requestTimeout = media.targetDuration * 1.5 * 1000;

        // If we don't have any more available playlists, we don't want to
        // timeout the request.
        if (isLowestEnabledRendition(_this2.masterPlaylistLoader_.master, _this2.masterPlaylistLoader_.media())) {
          _this2.requestOptions_.timeout = 0;
        } else {
          _this2.requestOptions_.timeout = requestTimeout;
        }

        // TODO: Create a new event on the PlaylistLoader that signals
        // that the segments have changed in some way and use that to
        // update the SegmentLoader instead of doing it twice here and
        // on `loadedplaylist`
        _this2.mainSegmentLoader_.playlist(media, _this2.requestOptions_);

        _this2.mainSegmentLoader_.load();

        _this2.tech_.trigger({
          type: 'mediachange',
          bubbles: true
        });
      });

      this.masterPlaylistLoader_.on('playlistunchanged', function () {
        var updatedPlaylist = _this2.masterPlaylistLoader_.media();
        var playlistOutdated = _this2.stuckAtPlaylistEnd_(updatedPlaylist);

        if (playlistOutdated) {
          // Playlist has stopped updating and we're stuck at its end. Try to
          // blacklist it and switch to another playlist in the hope that that
          // one is updating (and give the player a chance to re-adjust to the
          // safe live point).
          _this2.blacklistCurrentPlaylist({
            message: 'Playlist no longer updating.'
          });
          // useful for monitoring QoS
          _this2.tech_.trigger('playliststuck');
        }
      });

      this.masterPlaylistLoader_.on('renditiondisabled', function () {
        _this2.tech_.trigger({ type: 'usage', name: 'hls-rendition-disabled' });
      });
      this.masterPlaylistLoader_.on('renditionenabled', function () {
        _this2.tech_.trigger({ type: 'usage', name: 'hls-rendition-enabled' });
      });
    }
16246
  /**
   * A helper function for triggering presence usage events once per source
   *
   * @private
   */
16252
16253 }, {
16254 key: 'triggerPresenceUsage_',
16255 value: function triggerPresenceUsage_(master, media) {
16256 var mediaGroups = master.mediaGroups || {};
16257 var defaultDemuxed = true;
16258 var audioGroupKeys = Object.keys(mediaGroups.AUDIO);
16259
16260 for (var mediaGroup in mediaGroups.AUDIO) {
16261 for (var label in mediaGroups.AUDIO[mediaGroup]) {
16262 var properties = mediaGroups.AUDIO[mediaGroup][label];
16263
16264 if (!properties.uri) {
16265 defaultDemuxed = false;
16266 }
16267 }
16268 }
16269
16270 if (defaultDemuxed) {
16271 this.tech_.trigger({ type: 'usage', name: 'hls-demuxed' });
16272 }
16273
16274 if (Object.keys(mediaGroups.SUBTITLES).length) {
16275 this.tech_.trigger({ type: 'usage', name: 'hls-webvtt' });
16276 }
16277
16278 if (Hls.Playlist.isAes(media)) {
16279 this.tech_.trigger({ type: 'usage', name: 'hls-aes' });
16280 }
16281
16282 if (Hls.Playlist.isFmp4(media)) {
16283 this.tech_.trigger({ type: 'usage', name: 'hls-fmp4' });
16284 }
16285
16286 if (audioGroupKeys.length && Object.keys(mediaGroups.AUDIO[audioGroupKeys[0]]).length > 1) {
16287 this.tech_.trigger({ type: 'usage', name: 'hls-alternate-audio' });
16288 }
16289
16290 if (this.useCueTags_) {
16291 this.tech_.trigger({ type: 'usage', name: 'hls-playlist-cue-tags' });
16292 }
16293 }
    /**
     * Register event handlers on the segment loaders. A helper function
     * for construction time.
     *
     * @private
     */

  }, {
    key: 'setupSegmentLoaderListeners_',
    value: function setupSegmentLoaderListeners_() {
      var _this3 = this;

      // on every bandwidth estimate update, re-run playlist selection and
      // switch renditions when shouldSwitchToMedia approves the change
      this.mainSegmentLoader_.on('bandwidthupdate', function () {
        var nextPlaylist = _this3.selectPlaylist();
        var currentPlaylist = _this3.masterPlaylistLoader_.media();
        var buffered = _this3.tech_.buffered();
        // seconds of content buffered ahead of the playhead (0 when nothing
        // is buffered)
        var forwardBuffer = buffered.length ? buffered.end(buffered.length - 1) - _this3.tech_.currentTime() : 0;

        var bufferLowWaterLine = _this3.bufferLowWaterLine();

        if (shouldSwitchToMedia({
          currentPlaylist: currentPlaylist,
          nextPlaylist: nextPlaylist,
          forwardBuffer: forwardBuffer,
          bufferLowWaterLine: bufferLowWaterLine,
          duration: _this3.duration(),
          log: _this3.logger_
        })) {
          _this3.masterPlaylistLoader_.media(nextPlaylist);
        }

        // re-emit on the tech so external consumers (e.g. quality menus) can
        // observe bandwidth changes
        _this3.tech_.trigger('bandwidthupdate');
      });
      // forward download progress
      this.mainSegmentLoader_.on('progress', function () {
        _this3.trigger('progress');
      });

      // a main loader error blacklists the current rendition and forces a
      // new selection
      this.mainSegmentLoader_.on('error', function () {
        _this3.blacklistCurrentPlaylist(_this3.mainSegmentLoader_.error());
      });

      // recompute the seekable range whenever sync info changes
      this.mainSegmentLoader_.on('syncinfoupdate', function () {
        _this3.onSyncInfoUpdate_();
      });

      this.mainSegmentLoader_.on('timestampoffset', function () {
        _this3.tech_.trigger({ type: 'usage', name: 'hls-timestamp-offset' });
      });
      this.audioSegmentLoader_.on('syncinfoupdate', function () {
        _this3.onSyncInfoUpdate_();
      });

      // either loader finishing may complete the stream; onEndOfStream
      // decides whether both are required
      this.mainSegmentLoader_.on('ended', function () {
        _this3.onEndOfStream();
      });

      // an early abort means the current rendition cannot be downloaded
      // without rebuffering; blacklist it temporarily
      this.mainSegmentLoader_.on('earlyabort', function () {
        _this3.blacklistCurrentPlaylist({
          message: 'Aborted early because there isn\'t enough bandwidth to complete the ' + 'request without rebuffering.'
        }, ABORT_EARLY_BLACKLIST_SECONDS);
      });

      this.mainSegmentLoader_.on('reseteverything', function () {
        // If playing an MTS stream, a videojs.MediaSource is listening for
        // hls-reset to reset caption parsing state in the transmuxer
        _this3.tech_.trigger('hls-reset');
      });

      this.mainSegmentLoader_.on('segmenttimemapping', function (event) {
        // If playing an MTS stream in html, a videojs.MediaSource is listening for
        // hls-segment-time-mapping update its internal mapping of stream to display time
        _this3.tech_.trigger({
          type: 'hls-segment-time-mapping',
          mapping: event.mapping
        });
      });

      this.audioSegmentLoader_.on('ended', function () {
        _this3.onEndOfStream();
      });
    }
16375 }, {
16376 key: 'mediaSecondsLoaded_',
16377 value: function mediaSecondsLoaded_() {
16378 return Math.max(this.audioSegmentLoader_.mediaSecondsLoaded + this.mainSegmentLoader_.mediaSecondsLoaded);
16379 }
16380
16381 /**
16382 * Call load on our SegmentLoaders
16383 */
16384
16385 }, {
16386 key: 'load',
16387 value: function load() {
16388 this.mainSegmentLoader_.load();
16389 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
16390 this.audioSegmentLoader_.load();
16391 }
16392 if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
16393 this.subtitleSegmentLoader_.load();
16394 }
16395 }
16396
16397 /**
16398 * Re-tune playback quality level for the current player
16399 * conditions without performing destructive actions, like
16400 * removing already buffered content
16401 *
16402 * @private
16403 */
16404
16405 }, {
16406 key: 'smoothQualityChange_',
16407 value: function smoothQualityChange_() {
16408 var media = this.selectPlaylist();
16409
16410 if (media !== this.masterPlaylistLoader_.media()) {
16411 this.masterPlaylistLoader_.media(media);
16412
16413 this.mainSegmentLoader_.resetLoader();
16414 // don't need to reset audio as it is reset when media changes
16415 }
16416 }
16417
16418 /**
16419 * Re-tune playback quality level for the current player
16420 * conditions. This method will perform destructive actions like removing
16421 * already buffered content in order to readjust the currently active
16422 * playlist quickly. This is good for manual quality changes
16423 *
16424 * @private
16425 */
16426
16427 }, {
16428 key: 'fastQualityChange_',
16429 value: function fastQualityChange_() {
16430 var _this4 = this;
16431
16432 var media = this.selectPlaylist();
16433
16434 if (media === this.masterPlaylistLoader_.media()) {
16435 return;
16436 }
16437
16438 this.masterPlaylistLoader_.media(media);
16439
16440 // Delete all buffered data to allow an immediate quality switch, then seek to give
16441 // the browser a kick to remove any cached frames from the previous rendtion (.04 seconds
16442 // ahead is roughly the minimum that will accomplish this across a variety of content
16443 // in IE and Edge, but seeking in place is sufficient on all other browsers)
16444 // Edge/IE bug: https://developer.microsoft.com/en-us/microsoft-edge/platform/issues/14600375/
16445 // Chrome bug: https://bugs.chromium.org/p/chromium/issues/detail?id=651904
16446 this.mainSegmentLoader_.resetEverything(function () {
16447 // Since this is not a typical seek, we avoid the seekTo method which can cause segments
16448 // from the previously enabled rendition to load before the new playlist has finished loading
16449 if (videojs.browser.IE_VERSION || videojs.browser.IS_EDGE) {
16450 _this4.tech_.setCurrentTime(_this4.tech_.currentTime() + 0.04);
16451 } else {
16452 _this4.tech_.setCurrentTime(_this4.tech_.currentTime());
16453 }
16454 });
16455
16456 // don't need to reset audio as it is reset when media changes
16457 }
16458
16459 /**
16460 * Begin playback.
16461 */
16462
16463 }, {
16464 key: 'play',
16465 value: function play() {
16466 if (this.setupFirstPlay()) {
16467 return;
16468 }
16469
16470 if (this.tech_.ended()) {
16471 this.tech_.setCurrentTime(0);
16472 }
16473
16474 if (this.hasPlayed_) {
16475 this.load();
16476 }
16477
16478 var seekable$$1 = this.tech_.seekable();
16479
16480 // if the viewer has paused and we fell out of the live window,
16481 // seek forward to the live point
16482 if (this.tech_.duration() === Infinity) {
16483 if (this.tech_.currentTime() < seekable$$1.start(0)) {
16484 return this.tech_.setCurrentTime(seekable$$1.end(seekable$$1.length - 1));
16485 }
16486 }
16487 }
16488
    /**
     * Seek to the latest media position if this is a live video and the
     * player and video are loaded and initialized.
     *
     * @return {Boolean} whether the first play was fully set up (true) or
     * must be deferred/skipped (false)
     */

  }, {
    key: 'setupFirstPlay',
    value: function setupFirstPlay() {
      var _this5 = this;

      var media = this.masterPlaylistLoader_.media();

      // Check that everything is ready to begin buffering for the first call to play
      //  If 1) there is no active media
      //     2) the player is paused
      //     3) the first play has already been setup
      // then exit early
      if (!media || this.tech_.paused() || this.hasPlayed_) {
        return false;
      }

      // when the video is a live stream
      if (!media.endList) {
        var seekable$$1 = this.seekable();

        if (!seekable$$1.length) {
          // without a seekable range, the player cannot seek to begin buffering at the live
          // point
          return false;
        }

        if (videojs.browser.IE_VERSION && this.tech_.readyState() === 0) {
          // IE11 throws an InvalidStateError if you try to set currentTime while the
          // readyState is 0, so it must be delayed until the tech fires loadedmetadata.
          this.tech_.one('loadedmetadata', function () {
            // same firstplay -> seek -> hasPlayed_ ordering as the
            // non-deferred path below
            _this5.trigger('firstplay');
            _this5.tech_.setCurrentTime(seekable$$1.end(0));
            _this5.hasPlayed_ = true;
          });

          return false;
        }

        // trigger firstplay to inform the source handler to ignore the next seek event
        this.trigger('firstplay');
        // seek to the live point
        this.tech_.setCurrentTime(seekable$$1.end(0));
      }

      this.hasPlayed_ = true;
      // we can begin loading now that everything is ready
      this.load();
      return true;
    }
16543
16544 /**
16545 * handle the sourceopen event on the MediaSource
16546 *
16547 * @private
16548 */
16549
16550 }, {
16551 key: 'handleSourceOpen_',
16552 value: function handleSourceOpen_() {
16553 // Only attempt to create the source buffer if none already exist.
16554 // handleSourceOpen is also called when we are "re-opening" a source buffer
16555 // after `endOfStream` has been called (in response to a seek for instance)
16556 try {
16557 this.setupSourceBuffers_();
16558 } catch (e) {
16559 videojs.log.warn('Failed to create Source Buffers', e);
16560 return this.mediaSource.endOfStream('decode');
16561 }
16562
16563 // if autoplay is enabled, begin playback. This is duplicative of
16564 // code in video.js but is required because play() must be invoked
16565 // *after* the media source has opened.
16566 if (this.tech_.autoplay()) {
16567 var playPromise = this.tech_.play();
16568
16569 // Catch/silence error when a pause interrupts a play request
16570 // on browsers which return a promise
16571 if (typeof playPromise !== 'undefined' && typeof playPromise.then === 'function') {
16572 playPromise.then(null, function (e) {});
16573 }
16574 }
16575
16576 this.trigger('sourceopen');
16577 }
16578
16579 /**
16580 * Calls endOfStream on the media source when all active stream types have called
16581 * endOfStream
16582 *
16583 * @param {string} streamType
16584 * Stream type of the segment loader that called endOfStream
16585 * @private
16586 */
16587
16588 }, {
16589 key: 'onEndOfStream',
16590 value: function onEndOfStream() {
16591 var isEndOfStream = this.mainSegmentLoader_.ended_;
16592
16593 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
16594 // if the audio playlist loader exists, then alternate audio is active
16595 if (!this.mainSegmentLoader_.startingMedia_ || this.mainSegmentLoader_.startingMedia_.containsVideo) {
16596 // if we do not know if the main segment loader contains video yet or if we
16597 // definitively know the main segment loader contains video, then we need to wait
16598 // for both main and audio segment loaders to call endOfStream
16599 isEndOfStream = isEndOfStream && this.audioSegmentLoader_.ended_;
16600 } else {
16601 // otherwise just rely on the audio loader
16602 isEndOfStream = this.audioSegmentLoader_.ended_;
16603 }
16604 }
16605
16606 if (!isEndOfStream) {
16607 return;
16608 }
16609
16610 this.logger_('calling mediaSource.endOfStream()');
16611 // on chrome calling endOfStream can sometimes cause an exception,
16612 // even when the media source is in a valid state.
16613 try {
16614 this.mediaSource.endOfStream();
16615 } catch (e) {
16616 videojs.log.warn('Failed to call media source endOfStream', e);
16617 }
16618 }
16619
16620 /**
16621 * Check if a playlist has stopped being updated
16622 * @param {Object} playlist the media playlist object
16623 * @return {boolean} whether the playlist has stopped being updated or not
16624 */
16625
16626 }, {
16627 key: 'stuckAtPlaylistEnd_',
16628 value: function stuckAtPlaylistEnd_(playlist) {
16629 var seekable$$1 = this.seekable();
16630
16631 if (!seekable$$1.length) {
16632 // playlist doesn't have enough information to determine whether we are stuck
16633 return false;
16634 }
16635
16636 var expired = this.syncController_.getExpiredTime(playlist, this.mediaSource.duration);
16637
16638 if (expired === null) {
16639 return false;
16640 }
16641
16642 // does not use the safe live end to calculate playlist end, since we
16643 // don't want to say we are stuck while there is still content
16644 var absolutePlaylistEnd = Hls.Playlist.playlistEnd(playlist, expired);
16645 var currentTime = this.tech_.currentTime();
16646 var buffered = this.tech_.buffered();
16647
16648 if (!buffered.length) {
16649 // return true if the playhead reached the absolute end of the playlist
16650 return absolutePlaylistEnd - currentTime <= SAFE_TIME_DELTA;
16651 }
16652 var bufferedEnd = buffered.end(buffered.length - 1);
16653
16654 // return true if there is too little buffer left and buffer has reached absolute
16655 // end of playlist
16656 return bufferedEnd - currentTime <= SAFE_TIME_DELTA && absolutePlaylistEnd - bufferedEnd <= SAFE_TIME_DELTA;
16657 }
16658
    /**
     * Blacklists a playlist when an error occurs for a set amount of time
     * making it unavailable for selection by the rendition selection algorithm
     * and then forces a new playlist (rendition) selection.
     *
     * @param {Object=} error an optional error that may include the playlist
     * to blacklist
     * @param {Number=} blacklistDuration an optional number of seconds to blacklist the
     * playlist
     */

  }, {
    key: 'blacklistCurrentPlaylist',
    value: function blacklistCurrentPlaylist() {
      var error = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
      var blacklistDuration = arguments[1];

      var currentPlaylist = void 0;
      var nextPlaylist = void 0;

      // If the `error` was generated by the playlist loader, it will contain
      // the playlist we were trying to load (but failed) and that should be
      // blacklisted instead of the currently selected playlist which is likely
      // out-of-date in this scenario
      currentPlaylist = error.playlist || this.masterPlaylistLoader_.media();

      // fall back to the error's suggested duration, then to the controller's
      // configured default
      blacklistDuration = blacklistDuration || error.blacklistDuration || this.blacklistDuration;

      // If there is no current playlist, then an error occurred while we were
      // trying to load the master OR while we were disposing of the tech
      if (!currentPlaylist) {
        this.error = error;

        try {
          return this.mediaSource.endOfStream('network');
        } catch (e) {
          // endOfStream can throw if the media source is in the wrong state;
          // surface the error event instead
          return this.trigger('error');
        }
      }

      // "final rendition" means only one playlist is currently non-blacklisted
      // and enabled
      var isFinalRendition = this.masterPlaylistLoader_.master.playlists.filter(isEnabled).length === 1;
      var playlists = this.masterPlaylistLoader_.master.playlists;

      if (playlists.length === 1) {
        // Never blacklisting this playlist because it's the only playlist
        videojs.log.warn('Problem encountered with the current ' + 'HLS playlist. Trying again since it is the only playlist.');

        this.tech_.trigger('retryplaylist');
        return this.masterPlaylistLoader_.load(isFinalRendition);
      }

      if (isFinalRendition) {
        // Since we're on the final non-blacklisted playlist, and we're about to blacklist
        // it, instead of erring the player or retrying this playlist, clear out the current
        // blacklist. This allows other playlists to be attempted in case any have been
        // fixed.
        videojs.log.warn('Removing all playlists from the blacklist because the last ' + 'rendition is about to be blacklisted.');
        playlists.forEach(function (playlist) {
          // excludeUntil === Infinity marks a permanently incompatible
          // playlist; leave those blacklisted
          if (playlist.excludeUntil !== Infinity) {
            delete playlist.excludeUntil;
          }
        });
        // Technically we are retrying a playlist, in that we are simply retrying a previous
        // playlist. This is needed for users relying on the retryplaylist event to catch a
        // case where the player might be stuck and looping through "dead" playlists.
        this.tech_.trigger('retryplaylist');
      }

      // Blacklist this playlist (blacklistDuration is in seconds, excludeUntil
      // is an epoch-milliseconds timestamp)
      currentPlaylist.excludeUntil = Date.now() + blacklistDuration * 1000;
      this.tech_.trigger('blacklistplaylist');
      this.tech_.trigger({ type: 'usage', name: 'hls-rendition-blacklisted' });

      // Select a new playlist
      nextPlaylist = this.selectPlaylist();
      videojs.log.warn('Problem encountered with the current HLS playlist.' + (error.message ? ' ' + error.message : '') + ' Switching to another playlist.');

      return this.masterPlaylistLoader_.media(nextPlaylist, isFinalRendition);
    }
16738
16739 /**
16740 * Pause all segment loaders
16741 */
16742
16743 }, {
16744 key: 'pauseLoading',
16745 value: function pauseLoading() {
16746 this.mainSegmentLoader_.pause();
16747 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
16748 this.audioSegmentLoader_.pause();
16749 }
16750 if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
16751 this.subtitleSegmentLoader_.pause();
16752 }
16753 }
16754
16755 /**
16756 * set the current time on all segment loaders
16757 *
16758 * @param {TimeRange} currentTime the current time to set
16759 * @return {TimeRange} the current time
16760 */
16761
16762 }, {
16763 key: 'setCurrentTime',
16764 value: function setCurrentTime(currentTime) {
16765 var buffered = findRange(this.tech_.buffered(), currentTime);
16766
16767 if (!(this.masterPlaylistLoader_ && this.masterPlaylistLoader_.media())) {
16768 // return immediately if the metadata is not ready yet
16769 return 0;
16770 }
16771
16772 // it's clearly an edge-case but don't thrown an error if asked to
16773 // seek within an empty playlist
16774 if (!this.masterPlaylistLoader_.media().segments) {
16775 return 0;
16776 }
16777
16778 // In flash playback, the segment loaders should be reset on every seek, even
16779 // in buffer seeks. If the seek location is already buffered, continue buffering as
16780 // usual
16781 // TODO: redo this comment
16782 if (buffered && buffered.length) {
16783 return currentTime;
16784 }
16785
16786 // cancel outstanding requests so we begin buffering at the new
16787 // location
16788 this.mainSegmentLoader_.resetEverything();
16789 this.mainSegmentLoader_.abort();
16790 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
16791 this.audioSegmentLoader_.resetEverything();
16792 this.audioSegmentLoader_.abort();
16793 }
16794 if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
16795 this.subtitleSegmentLoader_.resetEverything();
16796 this.subtitleSegmentLoader_.abort();
16797 }
16798
16799 // start segment loader loading in case they are paused
16800 this.load();
16801 }
16802
16803 /**
16804 * get the current duration
16805 *
16806 * @return {TimeRange} the duration
16807 */
16808
16809 }, {
16810 key: 'duration',
16811 value: function duration$$1() {
16812 if (!this.masterPlaylistLoader_) {
16813 return 0;
16814 }
16815
16816 if (this.mediaSource) {
16817 return this.mediaSource.duration;
16818 }
16819
16820 return Hls.Playlist.duration(this.masterPlaylistLoader_.media());
16821 }
16822
16823 /**
16824 * check the seekable range
16825 *
16826 * @return {TimeRange} the seekable range
16827 */
16828
16829 }, {
16830 key: 'seekable',
16831 value: function seekable$$1() {
16832 return this.seekable_;
16833 }
  }, {
    key: 'onSyncInfoUpdate_',
    value: function onSyncInfoUpdate_() {
      // Recomputes this.seekable_ from the main (and, when active, alternate
      // audio) playlists and fires `seekablechanged` on the tech when the
      // range actually moved. Bails out silently whenever there is not yet
      // enough information to compute a range.
      var audioSeekable = void 0;

      if (!this.masterPlaylistLoader_) {
        return;
      }

      var media = this.masterPlaylistLoader_.media();

      if (!media) {
        return;
      }

      var expired = this.syncController_.getExpiredTime(media, this.mediaSource.duration);

      if (expired === null) {
        // not enough information to update seekable
        return;
      }

      var suggestedPresentationDelay = this.masterPlaylistLoader_.master.suggestedPresentationDelay;
      var mainSeekable = Hls.Playlist.seekable(media, expired, suggestedPresentationDelay);

      if (mainSeekable.length === 0) {
        return;
      }

      // when alternate audio is active, its seekable range must be computed
      // as well so the two can be intersected below
      if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
        media = this.mediaTypes_.AUDIO.activePlaylistLoader.media();
        expired = this.syncController_.getExpiredTime(media, this.mediaSource.duration);

        if (expired === null) {
          return;
        }

        audioSeekable = Hls.Playlist.seekable(media, expired, suggestedPresentationDelay);

        if (audioSeekable.length === 0) {
          return;
        }
      }

      // remember the previous range so an unchanged result can skip the
      // seekablechanged event
      var oldEnd = void 0;
      var oldStart = void 0;

      if (this.seekable_ && this.seekable_.length) {
        oldEnd = this.seekable_.end(0);
        oldStart = this.seekable_.start(0);
      }

      if (!audioSeekable) {
        // seekable has been calculated based on buffering video data so it
        // can be returned directly
        this.seekable_ = mainSeekable;
      } else if (audioSeekable.start(0) > mainSeekable.end(0) || mainSeekable.start(0) > audioSeekable.end(0)) {
        // seekables are pretty far off, rely on main
        this.seekable_ = mainSeekable;
      } else {
        // intersect the two ranges: later of the starts, earlier of the ends
        this.seekable_ = videojs.createTimeRanges([[audioSeekable.start(0) > mainSeekable.start(0) ? audioSeekable.start(0) : mainSeekable.start(0), audioSeekable.end(0) < mainSeekable.end(0) ? audioSeekable.end(0) : mainSeekable.end(0)]]);
      }

      // seekable is the same as last time
      if (this.seekable_ && this.seekable_.length) {
        if (this.seekable_.end(0) === oldEnd && this.seekable_.start(0) === oldStart) {
          return;
        }
      }

      this.logger_('seekable updated [' + printableRange(this.seekable_) + ']');

      this.tech_.trigger('seekablechanged');
    }
16908
16909 /**
16910 * Update the player duration
16911 */
16912
16913 }, {
16914 key: 'updateDuration',
16915 value: function updateDuration() {
16916 var _this6 = this;
16917
16918 var oldDuration = this.mediaSource.duration;
16919 var newDuration = Hls.Playlist.duration(this.masterPlaylistLoader_.media());
16920 var buffered = this.tech_.buffered();
16921 var setDuration = function setDuration() {
16922 // on firefox setting the duration may sometimes cause an exception
16923 // even if the media source is open and source buffers are not
16924 // updating, something about the media source being in an invalid state.
16925 _this6.logger_('Setting duration from ' + _this6.mediaSource.duration + ' => ' + newDuration);
16926 try {
16927 _this6.mediaSource.duration = newDuration;
16928 } catch (e) {
16929 videojs.log.warn('Failed to set media source duration', e);
16930 }
16931 _this6.tech_.trigger('durationchange');
16932
16933 _this6.mediaSource.removeEventListener('sourceopen', setDuration);
16934 };
16935
16936 if (buffered.length > 0) {
16937 newDuration = Math.max(newDuration, buffered.end(buffered.length - 1));
16938 }
16939
16940 // if the duration has changed, invalidate the cached value
16941 if (oldDuration !== newDuration) {
16942 // update the duration
16943 if (this.mediaSource.readyState !== 'open') {
16944 this.mediaSource.addEventListener('sourceopen', setDuration);
16945 } else {
16946 setDuration();
16947 }
16948 }
16949 }
16950
16951 /**
16952 * dispose of the MasterPlaylistController and everything
16953 * that it controls
16954 */
16955
16956 }, {
16957 key: 'dispose',
16958 value: function dispose() {
16959 var _this7 = this;
16960
16961 this.trigger('dispose');
16962 if (this.decrypter_) {
16963 this.decrypter_.terminate();
16964 }
16965 this.masterPlaylistLoader_.dispose();
16966 this.mainSegmentLoader_.dispose();
16967
16968 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
16969 var groups = _this7.mediaTypes_[type].groups;
16970
16971 for (var id in groups) {
16972 groups[id].forEach(function (group) {
16973 if (group.playlistLoader) {
16974 group.playlistLoader.dispose();
16975 }
16976 });
16977 }
16978 });
16979
16980 this.audioSegmentLoader_.dispose();
16981 this.subtitleSegmentLoader_.dispose();
16982 this.off();
16983
16984 if (this.mediaSource.dispose) {
16985 this.mediaSource.dispose();
16986 }
16987 }
16988
16989 /**
16990 * return the master playlist object if we have one
16991 *
16992 * @return {Object} the master playlist object that we parsed
16993 */
16994
16995 }, {
16996 key: 'master',
16997 value: function master() {
16998 return this.masterPlaylistLoader_.master;
16999 }
17000
17001 /**
17002 * return the currently selected playlist
17003 *
17004 * @return {Object} the currently selected playlist object that we parsed
17005 */
17006
17007 }, {
17008 key: 'media',
17009 value: function media() {
17010 // playlist loader will not return media if it has not been fully loaded
17011 return this.masterPlaylistLoader_.media() || this.initialMedia_;
17012 }
17013
17014 /**
17015 * setup our internal source buffers on our segment Loaders
17016 *
17017 * @private
17018 */
17019
17020 }, {
17021 key: 'setupSourceBuffers_',
17022 value: function setupSourceBuffers_() {
17023 var media = this.masterPlaylistLoader_.media();
17024 var mimeTypes = void 0;
17025
17026 // wait until a media playlist is available and the Media Source is
17027 // attached
17028 if (!media || this.mediaSource.readyState !== 'open') {
17029 return;
17030 }
17031
17032 mimeTypes = mimeTypesForPlaylist(this.masterPlaylistLoader_.master, media);
17033 if (mimeTypes.length < 1) {
17034 this.error = 'No compatible SourceBuffer configuration for the variant stream:' + media.resolvedUri;
17035 return this.mediaSource.endOfStream('decode');
17036 }
17037
17038 this.configureLoaderMimeTypes_(mimeTypes);
17039 // exclude any incompatible variant streams from future playlist
17040 // selection
17041 this.excludeIncompatibleVariants_(media);
17042 }
17043 }, {
17044 key: 'configureLoaderMimeTypes_',
17045 value: function configureLoaderMimeTypes_(mimeTypes) {
17046 // If the content is demuxed, we can't start appending segments to a source buffer
17047 // until both source buffers are set up, or else the browser may not let us add the
17048 // second source buffer (it will assume we are playing either audio only or video
17049 // only).
17050 var sourceBufferEmitter =
17051 // If there is more than one mime type
17052 mimeTypes.length > 1 &&
17053 // and the first mime type does not have muxed video and audio
17054 mimeTypes[0].indexOf(',') === -1 &&
17055 // and the two mime types are different (they can be the same in the case of audio
17056 // only with alternate audio)
17057 mimeTypes[0] !== mimeTypes[1] ?
17058 // then we want to wait on the second source buffer
17059 new videojs.EventTarget() :
17060 // otherwise there is no need to wait as the content is either audio only,
17061 // video only, or muxed content.
17062 null;
17063
17064 this.mainSegmentLoader_.mimeType(mimeTypes[0], sourceBufferEmitter);
17065 if (mimeTypes[1]) {
17066 this.audioSegmentLoader_.mimeType(mimeTypes[1], sourceBufferEmitter);
17067 }
17068 }
17069
17070 /**
17071 * Blacklists playlists with codecs that are unsupported by the browser.
17072 */
17073
17074 }, {
17075 key: 'excludeUnsupportedVariants_',
17076 value: function excludeUnsupportedVariants_() {
17077 this.master().playlists.forEach(function (variant) {
17078 if (variant.attributes.CODECS && window$1.MediaSource && window$1.MediaSource.isTypeSupported && !window$1.MediaSource.isTypeSupported('video/mp4; codecs="' + mapLegacyAvcCodecs(variant.attributes.CODECS) + '"')) {
17079 variant.excludeUntil = Infinity;
17080 }
17081 });
17082 }
17083
17084 /**
17085 * Blacklist playlists that are known to be codec or
17086 * stream-incompatible with the SourceBuffer configuration. For
17087 * instance, Media Source Extensions would cause the video element to
17088 * stall waiting for video data if you switched from a variant with
17089 * video and audio to an audio-only one.
17090 *
17091 * @param {Object} media a media playlist compatible with the current
17092 * set of SourceBuffers. Variants in the current master playlist that
17093 * do not appear to have compatible codec or stream configurations
17094 * will be excluded from the default playlist selection algorithm
17095 * indefinitely.
17096 * @private
17097 */
17098
17099 }, {
17100 key: 'excludeIncompatibleVariants_',
17101 value: function excludeIncompatibleVariants_(media) {
17102 var codecCount = 2;
17103 var videoCodec = null;
17104 var codecs = void 0;
17105
17106 if (media.attributes.CODECS) {
17107 codecs = parseCodecs(media.attributes.CODECS);
17108 videoCodec = codecs.videoCodec;
17109 codecCount = codecs.codecCount;
17110 }
17111
17112 this.master().playlists.forEach(function (variant) {
17113 var variantCodecs = {
17114 codecCount: 2,
17115 videoCodec: null
17116 };
17117
17118 if (variant.attributes.CODECS) {
17119 variantCodecs = parseCodecs(variant.attributes.CODECS);
17120 }
17121
17122 // if the streams differ in the presence or absence of audio or
17123 // video, they are incompatible
17124 if (variantCodecs.codecCount !== codecCount) {
17125 variant.excludeUntil = Infinity;
17126 }
17127
17128 // if h.264 is specified on the current playlist, some flavor of
17129 // it must be specified on all compatible variants
17130 if (variantCodecs.videoCodec !== videoCodec) {
17131 variant.excludeUntil = Infinity;
17132 }
17133 });
17134 }
17135 }, {
17136 key: 'updateAdCues_',
17137 value: function updateAdCues_(media) {
17138 var offset = 0;
17139 var seekable$$1 = this.seekable();
17140
17141 if (seekable$$1.length) {
17142 offset = seekable$$1.start(0);
17143 }
17144
17145 updateAdCues(media, this.cueTagsTrack_, offset);
17146 }
17147
17148 /**
17149 * Calculates the desired forward buffer length based on current time
17150 *
17151 * @return {Number} Desired forward buffer length in seconds
17152 */
17153
17154 }, {
17155 key: 'goalBufferLength',
17156 value: function goalBufferLength() {
17157 var currentTime = this.tech_.currentTime();
17158 var initial = Config.GOAL_BUFFER_LENGTH;
17159 var rate = Config.GOAL_BUFFER_LENGTH_RATE;
17160 var max = Math.max(initial, Config.MAX_GOAL_BUFFER_LENGTH);
17161
17162 return Math.min(initial + currentTime * rate, max);
17163 }
17164
17165 /**
17166 * Calculates the desired buffer low water line based on current time
17167 *
17168 * @return {Number} Desired buffer low water line in seconds
17169 */
17170
17171 }, {
17172 key: 'bufferLowWaterLine',
17173 value: function bufferLowWaterLine() {
17174 var currentTime = this.tech_.currentTime();
17175 var initial = Config.BUFFER_LOW_WATER_LINE;
17176 var rate = Config.BUFFER_LOW_WATER_LINE_RATE;
17177 var max = Math.max(initial, Config.MAX_BUFFER_LOW_WATER_LINE);
17178
17179 return Math.min(initial + currentTime * rate, max);
17180 }
17181 }]);
17182 return MasterPlaylistController;
17183}(videojs.EventTarget);
17184
/**
 * Returns a function that acts as the Enable/disable playlist function.
 *
 * @param {PlaylistLoader} loader - The master playlist loader
 * @param {string} playlistID - id of the playlist
 * @param {Function} changePlaylistFn - A function to be called after a
 * playlist's enabled-state has been changed. Will NOT be called if a
 * playlist's enabled-state is unchanged
 * @param {Boolean=} enable - Value to set the playlist enabled-state to
 * or if undefined returns the current enabled-state for the playlist
 * @return {Function} Function for setting/getting enabled
 */
var enableFunction = function enableFunction(loader, playlistID, changePlaylistFn) {
  return function (enable) {
    var playlist = loader.master.playlists[playlistID];
    // capture compatibility and the pre-change enabled state before mutating
    var incompatible = isIncompatible(playlist);
    var wasEnabled = isEnabled(playlist);

    // getter form: no argument means report the current state
    if (typeof enable === 'undefined') {
      return wasEnabled;
    }

    if (enable) {
      delete playlist.disabled;
    } else {
      playlist.disabled = true;
    }

    // only notify when the state actually flipped and the playlist is not
    // blacklisted as incompatible
    if (enable !== wasEnabled && !incompatible) {
      // Ensure the outside world knows about our changes
      changePlaylistFn();
      loader.trigger(enable ? 'renditionenabled' : 'renditiondisabled');
    }

    return enable;
  };
};
17226
17227/**
17228 * The representation object encapsulates the publicly visible information
17229 * in a media playlist along with a setter/getter-type function (enabled)
17230 * for changing the enabled-state of a particular playlist entry
17231 *
17232 * @class Representation
17233 */
17234
var Representation = function Representation(hlsHandler, playlist, id) {
  classCallCheck(this, Representation);

  var mpc = hlsHandler.masterPlaylistController_;
  var useSmooth = hlsHandler.options_.smoothQualityChange;

  // Get a reference to a bound version of the quality change function
  // ('smooth' or 'fast' variant, chosen by the handler's options).
  var methodPrefix = useSmooth ? 'smooth' : 'fast';
  var qualityChangeFunction = mpc[methodPrefix + 'QualityChange_'].bind(mpc);

  // RESOLUTION is an optional media playlist attribute
  var resolution = playlist.attributes.RESOLUTION;

  if (resolution) {
    this.width = resolution.width;
    this.height = resolution.height;
  }

  this.bandwidth = playlist.attributes.BANDWIDTH;

  // The id is simply the ordinality of the media playlist
  // within the master playlist
  this.id = id;

  // Partially-apply the enableFunction to create a playlist-
  // specific variant
  this.enabled = enableFunction(hlsHandler.playlists, playlist.id, qualityChangeFunction);
};
17262
/**
 * A mixin function that adds the `representations` API to an instance
 * of the HlsHandler class.
 *
 * @param {HlsHandler} hlsHandler - An instance of HlsHandler to add the
 * representation API into
 */
var renditionSelectionMixin = function renditionSelectionMixin(hlsHandler) {
  var playlists = hlsHandler.playlists;

  // Add a single API-specific function to the HlsHandler instance
  hlsHandler.representations = function () {
    // Guard against being called before the master playlist has been loaded.
    if (!playlists || !playlists.master || !playlists.master.playlists) {
      return [];
    }
    return playlists.master.playlists.filter(function (media) {
      return !isIncompatible(media);
    }).map(function (media) {
      // (unused index parameter removed from the map callback)
      return new Representation(hlsHandler, media, media.id);
    });
  };
};
17286
17287/**
17288 * @file playback-watcher.js
17289 *
17290 * Playback starts, and now my watch begins. It shall not end until my death. I shall
17291 * take no wait, hold no uncleared timeouts, father no bad seeks. I shall wear no crowns
17292 * and win no glory. I shall live and die at my post. I am the corrector of the underflow.
17293 * I am the watcher of gaps. I am the shield that guards the realms of seekable. I pledge
17294 * my life and honor to the Playback Watch, for this Player and all the Players to come.
17295 */
17296
// Set of events that reset the playback-watcher time check logic and clear the timeout
// (each indicates the player's state changed for a known, expected reason).
var timerCancelEvents = ['seeking', 'seeked', 'pause', 'playing', 'error'];
17299
17300/**
17301 * @class PlaybackWatcher
17302 */
17303
var PlaybackWatcher = function () {
  /**
   * Represents a PlaybackWatcher object.
   * @constructor
   * @param {object} options an object that includes the tech and settings
   */
  function PlaybackWatcher(options) {
    var _this = this;

    classCallCheck(this, PlaybackWatcher);

    this.tech_ = options.tech;
    this.seekable = options.seekable;
    this.allowSeeksWithinUnsafeLiveWindow = options.allowSeeksWithinUnsafeLiveWindow;
    this.media = options.media;

    // consecutiveUpdates counts checks that observed the same currentTime;
    // lastRecordedTime is the currentTime seen on the previous check.
    this.consecutiveUpdates = 0;
    this.lastRecordedTime = null;
    this.timer_ = null;
    this.checkCurrentTimeTimeout_ = null;
    this.logger_ = logger('PlaybackWatcher');

    this.logger_('initialize');

    var canPlayHandler = function canPlayHandler() {
      return _this.monitorCurrentTime_();
    };
    var waitingHandler = function waitingHandler() {
      return _this.techWaiting_();
    };
    var cancelTimerHandler = function cancelTimerHandler() {
      return _this.cancelTimer_();
    };
    var fixesBadSeeksHandler = function fixesBadSeeksHandler() {
      return _this.fixesBadSeeks_();
    };

    this.tech_.on('seekablechanged', fixesBadSeeksHandler);
    this.tech_.on('waiting', waitingHandler);
    this.tech_.on(timerCancelEvents, cancelTimerHandler);
    this.tech_.on('canplay', canPlayHandler);

    // Define the dispose function to clean up our events
    this.dispose = function () {
      _this.logger_('dispose');
      _this.tech_.off('seekablechanged', fixesBadSeeksHandler);
      _this.tech_.off('waiting', waitingHandler);
      _this.tech_.off(timerCancelEvents, cancelTimerHandler);
      _this.tech_.off('canplay', canPlayHandler);
      if (_this.checkCurrentTimeTimeout_) {
        window$1.clearTimeout(_this.checkCurrentTimeTimeout_);
      }
      _this.cancelTimer_();
    };
  }

  /**
   * Periodically check current time to see if playback stopped
   *
   * @private
   */

  createClass(PlaybackWatcher, [{
    key: 'monitorCurrentTime_',
    value: function monitorCurrentTime_() {
      this.checkCurrentTime_();

      if (this.checkCurrentTimeTimeout_) {
        window$1.clearTimeout(this.checkCurrentTimeTimeout_);
      }

      // 42 = 24 fps // 250 is what Webkit uses // FF uses 15
      this.checkCurrentTimeTimeout_ = window$1.setTimeout(this.monitorCurrentTime_.bind(this), 250);
    }

    /**
     * The purpose of this function is to emulate the "waiting" event on
     * browsers that do not emit it when they are waiting for more
     * data to continue playback
     *
     * @private
     */

  }, {
    key: 'checkCurrentTime_',
    value: function checkCurrentTime_() {
      // A seek in progress may be a bad seek; if it was fixed, reset the
      // stall-detection state and stop this check early.
      if (this.tech_.seeking() && this.fixesBadSeeks_()) {
        this.consecutiveUpdates = 0;
        this.lastRecordedTime = this.tech_.currentTime();
        return;
      }

      if (this.tech_.paused() || this.tech_.seeking()) {
        return;
      }

      var currentTime = this.tech_.currentTime();
      var buffered = this.tech_.buffered();

      if (this.lastRecordedTime === currentTime && (!buffered.length || currentTime + SAFE_TIME_DELTA >= buffered.end(buffered.length - 1))) {
        // If current time is at the end of the final buffered region, then any playback
        // stall is most likely caused by buffering in a low bandwidth environment. The tech
        // should fire a `waiting` event in this scenario, but due to browser and tech
        // inconsistencies. Calling `techWaiting_` here allows us to simulate
        // responding to a native `waiting` event when the tech fails to emit one.
        return this.techWaiting_();
      }

      if (this.consecutiveUpdates >= 5 && currentTime === this.lastRecordedTime) {
        // currentTime has been stuck for 5+ consecutive checks while not at
        // the buffer's end; treat it as an unexplained stall.
        this.consecutiveUpdates++;
        this.waiting_();
      } else if (currentTime === this.lastRecordedTime) {
        this.consecutiveUpdates++;
      } else {
        this.consecutiveUpdates = 0;
        this.lastRecordedTime = currentTime;
      }
    }

    /**
     * Cancels any pending timers and resets the 'timeupdate' mechanism
     * designed to detect that we are stalled
     *
     * @private
     */

  }, {
    key: 'cancelTimer_',
    value: function cancelTimer_() {
      this.consecutiveUpdates = 0;

      if (this.timer_) {
        this.logger_('cancelTimer_');
        clearTimeout(this.timer_);
      }

      this.timer_ = null;
    }

    /**
     * Fixes situations where there's a bad seek
     *
     * @return {Boolean} whether an action was taken to fix the seek
     * @private
     */

  }, {
    key: 'fixesBadSeeks_',
    value: function fixesBadSeeks_() {
      var seeking = this.tech_.seeking();

      if (!seeking) {
        return false;
      }

      var seekable = this.seekable();
      var currentTime = this.tech_.currentTime();
      var isAfterSeekableRange = this.afterSeekableWindow_(seekable, currentTime, this.media(), this.allowSeeksWithinUnsafeLiveWindow);
      var seekTo = void 0;

      if (isAfterSeekableRange) {
        var seekableEnd = seekable.end(seekable.length - 1);

        // sync to live point (if VOD, our seekable was updated and we're simply adjusting)
        seekTo = seekableEnd;
      }

      if (this.beforeSeekableWindow_(seekable, currentTime)) {
        var seekableStart = seekable.start(0);

        // sync to the beginning of the live window
        // provide a buffer of .1 seconds to handle rounding/imprecise numbers
        seekTo = seekableStart + SAFE_TIME_DELTA;
      }

      if (typeof seekTo !== 'undefined') {
        this.logger_('Trying to seek outside of seekable at time ' + currentTime + ' with ' + ('seekable range ' + printableRange(seekable) + '. Seeking to ') + (seekTo + '.'));

        this.tech_.setCurrentTime(seekTo);
        return true;
      }

      return false;
    }

    /**
     * Handler for situations when we determine the player is waiting.
     *
     * @private
     */

  }, {
    key: 'waiting_',
    value: function waiting_() {
      if (this.techWaiting_()) {
        return;
      }

      // All tech waiting checks failed. Use last resort correction
      var currentTime = this.tech_.currentTime();
      var buffered = this.tech_.buffered();
      var currentRange = findRange(buffered, currentTime);

      // Sometimes the player can stall for unknown reasons within a contiguous buffered
      // region with no indication that anything is amiss (seen in Firefox). Seeking to
      // currentTime is usually enough to kickstart the player. This checks that the player
      // is currently within a buffered region before attempting a corrective seek.
      // Chrome does not appear to continue `timeupdate` events after a `waiting` event
      // until there is ~ 3 seconds of forward buffer available. PlaybackWatcher should also
      // make sure there is ~3 seconds of forward buffer before taking any corrective action
      // to avoid triggering an `unknownwaiting` event when the network is slow.
      if (currentRange.length && currentTime + 3 <= currentRange.end(0)) {
        this.cancelTimer_();
        this.tech_.setCurrentTime(currentTime);

        this.logger_('Stopped at ' + currentTime + ' while inside a buffered region ' + ('[' + currentRange.start(0) + ' -> ' + currentRange.end(0) + ']. Attempting to resume ') + 'playback by seeking to the current time.');

        // unknown waiting corrections may be useful for monitoring QoS
        this.tech_.trigger({ type: 'usage', name: 'hls-unknown-waiting' });
        return;
      }
    }

    /**
     * Handler for situations when the tech fires a `waiting` event
     *
     * @return {Boolean}
     *         True if an action (or none) was needed to correct the waiting. False if no
     *         checks passed
     * @private
     */

  }, {
    key: 'techWaiting_',
    value: function techWaiting_() {
      var seekable = this.seekable();
      var currentTime = this.tech_.currentTime();

      if (this.tech_.seeking() && this.fixesBadSeeks_()) {
        // Tech is seeking or bad seek fixed, no action needed
        return true;
      }

      if (this.tech_.seeking() || this.timer_ !== null) {
        // Tech is seeking or already waiting on another action, no action needed
        return true;
      }

      if (this.beforeSeekableWindow_(seekable, currentTime)) {
        var livePoint = seekable.end(seekable.length - 1);

        this.logger_('Fell out of live window at time ' + currentTime + '. Seeking to ' + ('live point (seekable end) ' + livePoint));
        this.cancelTimer_();
        this.tech_.setCurrentTime(livePoint);

        // live window resyncs may be useful for monitoring QoS
        this.tech_.trigger({ type: 'usage', name: 'hls-live-resync' });
        return true;
      }

      var buffered = this.tech_.buffered();
      var nextRange = findNextRange(buffered, currentTime);

      if (this.videoUnderflow_(nextRange, buffered, currentTime)) {
        // Even though the video underflowed and was stuck in a gap, the audio overplayed
        // the gap, leading currentTime into a buffered range. Seeking to currentTime
        // allows the video to catch up to the audio position without losing any audio
        // (only suffering ~3 seconds of frozen video and a pause in audio playback).
        this.cancelTimer_();
        this.tech_.setCurrentTime(currentTime);

        // video underflow may be useful for monitoring QoS
        this.tech_.trigger({ type: 'usage', name: 'hls-video-underflow' });
        return true;
      }

      // check for gap
      if (nextRange.length > 0) {
        var difference = nextRange.start(0) - currentTime;

        this.logger_('Stopped at ' + currentTime + ', setting timer for ' + difference + ', seeking ' + ('to ' + nextRange.start(0)));

        // Wait out the gap in real time, then jump over it in skipTheGap_.
        this.timer_ = setTimeout(this.skipTheGap_.bind(this), difference * 1000, currentTime);
        return true;
      }

      // All checks failed. Returning false to indicate failure to correct waiting
      return false;
    }

    /**
     * Whether currentTime has fallen past the allowed end of the seekable window.
     *
     * @param {TimeRanges} seekable the current seekable ranges
     * @param {Number} currentTime the tech's current time
     * @param {Object} playlist the active media playlist
     * @param {Boolean=} allowSeeksWithinUnsafeLiveWindow when true (live only),
     *        extend the allowed end by three target durations
     * @return {Boolean} true when currentTime exceeds the allowed end
     * @private
     */

  }, {
    key: 'afterSeekableWindow_',
    value: function afterSeekableWindow_(seekable, currentTime, playlist) {
      var allowSeeksWithinUnsafeLiveWindow = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : false;

      if (!seekable.length) {
        // we can't make a solid case if there's no seekable, default to false
        return false;
      }

      var allowedEnd = seekable.end(seekable.length - 1) + SAFE_TIME_DELTA;
      var isLive = !playlist.endList;

      if (isLive && allowSeeksWithinUnsafeLiveWindow) {
        allowedEnd = seekable.end(seekable.length - 1) + playlist.targetDuration * 3;
      }

      if (currentTime > allowedEnd) {
        return true;
      }

      return false;
    }

    /**
     * Whether currentTime has fallen before the start of the seekable window
     * (only meaningful for live, where seekable start is > 0).
     *
     * @param {TimeRanges} seekable the current seekable ranges
     * @param {Number} currentTime the tech's current time
     * @return {Boolean} true when currentTime precedes the safe seekable start
     * @private
     */

  }, {
    key: 'beforeSeekableWindow_',
    value: function beforeSeekableWindow_(seekable, currentTime) {
      if (seekable.length &&
      // can't fall before 0 and 0 seekable start identifies VOD stream
      seekable.start(0) > 0 && currentTime < seekable.start(0) - SAFE_TIME_DELTA) {
        return true;
      }

      return false;
    }

    /**
     * Whether the current stall looks like video-buffer underflow: there is no
     * upcoming buffered range, but a recent gap in the buffer suggests the audio
     * overplayed a video gap (see gapFromVideoUnderflow_).
     *
     * @return {Boolean} true when a video-underflow gap was detected
     * @private
     */

  }, {
    key: 'videoUnderflow_',
    value: function videoUnderflow_(nextRange, buffered, currentTime) {
      if (nextRange.length === 0) {
        // Even if there is no available next range, there is still a possibility we are
        // stuck in a gap due to video underflow.
        var gap = this.gapFromVideoUnderflow_(buffered, currentTime);

        if (gap) {
          this.logger_('Encountered a gap in video from ' + gap.start + ' to ' + gap.end + '. ' + ('Seeking to current time ' + currentTime));

          return true;
        }
      }

      return false;
    }

    /**
     * Timer callback. If playback still has not proceeded, then we seek
     * to the start of the next buffered region.
     *
     * @private
     */

  }, {
    key: 'skipTheGap_',
    value: function skipTheGap_(scheduledCurrentTime) {
      var buffered = this.tech_.buffered();
      var currentTime = this.tech_.currentTime();
      var nextRange = findNextRange(buffered, currentTime);

      this.cancelTimer_();

      if (nextRange.length === 0 || currentTime !== scheduledCurrentTime) {
        return;
      }

      this.logger_('skipTheGap_:', 'currentTime:', currentTime, 'scheduled currentTime:', scheduledCurrentTime, 'nextRange start:', nextRange.start(0));

      // only seek if we still have not played
      this.tech_.setCurrentTime(nextRange.start(0) + TIME_FUDGE_FACTOR);

      this.tech_.trigger({ type: 'usage', name: 'hls-gap-skip' });
    }

    /**
     * Finds a buffer gap that would explain a video-underflow stall, if any.
     *
     * @param {TimeRanges} buffered the tech's buffered ranges
     * @param {Number} currentTime the tech's current time
     * @return {Object|null} { start, end } of the matching gap, or null
     * @private
     */

  }, {
    key: 'gapFromVideoUnderflow_',
    value: function gapFromVideoUnderflow_(buffered, currentTime) {
      // At least in Chrome, if there is a gap in the video buffer, the audio will continue
      // playing for ~3 seconds after the video gap starts. This is done to account for
      // video buffer underflow/underrun (note that this is not done when there is audio
      // buffer underflow/underrun -- in that case the video will stop as soon as it
      // encounters the gap, as audio stalls are more noticeable/jarring to a user than
      // video stalls). The player's time will reflect the playthrough of audio, so the
      // time will appear as if we are in a buffered region, even if we are stuck in a
      // "gap."
      //
      // Example:
      // video buffer:   0 => 10.1, 10.2 => 20
      // audio buffer:   0 => 20
      // overall buffer: 0 => 10.1, 10.2 => 20
      // current time: 13
      //
      // Chrome's video froze at 10 seconds, where the video buffer encountered the gap,
      // however, the audio continued playing until it reached ~3 seconds past the gap
      // (13 seconds), at which point it stops as well. Since current time is past the
      // gap, findNextRange will return no ranges.
      //
      // To check for this issue, we see if there is a gap that starts somewhere within
      // a 3 second range (3 seconds +/- 1 second) back from our current time.
      var gaps = findGaps(buffered);

      for (var i = 0; i < gaps.length; i++) {
        var start = gaps.start(i);
        var end = gaps.end(i);

        // gap starts no more than 4 seconds back
        if (currentTime - start < 4 && currentTime - start > 2) {
          return {
            start: start,
            end: end
          };
        }
      }

      return null;
    }
  }]);
  return PlaybackWatcher;
}();
17718
// Default options for the reloadSourceOnError plugin.
var defaultOptions = {
  // Minimum number of seconds that must elapse between automatic reloads.
  errorInterval: 30,
  // Reads the current source object off the tech (called with the player as
  // `this`) and hands it to the `next` continuation.
  getSource: function getSource(next) {
    var tech = this.tech({ IWillNotUseThisInPlugins: true });

    return next(tech.currentSource_);
  }
};
17728
17729/**
17730 * Main entry point for the plugin
17731 *
17732 * @param {Player} player a reference to a videojs Player instance
17733 * @param {Object} [options] an object with plugin options
17734 * @private
17735 */
var initPlugin = function initPlugin(player, options) {
  // Timestamp of the last automatic reload, used to rate-limit reloads.
  var lastReloadTime = 0;
  // Playback position to restore after the new source finishes loading.
  var resumeTime = 0;
  var localOptions = videojs.mergeOptions(defaultOptions, options);

  player.ready(function () {
    player.trigger({ type: 'usage', name: 'hls-error-reload-initialized' });
  });

  /**
   * Player modifications to perform that must wait until `loadedmetadata`
   * has been triggered
   *
   * @private
   */
  var loadedMetadataHandler = function loadedMetadataHandler() {
    if (resumeTime) {
      player.currentTime(resumeTime);
    }
  };

  /**
   * Set the source on the player element, play, and seek if necessary
   *
   * @param {Object} sourceObj An object specifying the source url and mime-type to play
   * @private
   */
  var setSource = function setSource(sourceObj) {
    if (sourceObj === null || sourceObj === undefined) {
      return;
    }

    // Remember where playback was so it can be restored once the new source
    // loads; live streams (Infinity duration) always restart at the default.
    resumeTime = 0;
    if (player.duration() !== Infinity) {
      resumeTime = player.currentTime() || 0;
    }

    player.one('loadedmetadata', loadedMetadataHandler);

    player.src(sourceObj);
    player.trigger({ type: 'usage', name: 'hls-error-reload' });
    player.play();
  };

  /**
   * Attempt to get a source from either the built-in getSource function
   * or a custom function provided via the options
   *
   * @private
   */
  var errorHandler = function errorHandler() {
    // Do not attempt to reload the source if a source-reload occurred before
    // 'errorInterval' time has elapsed since the last source-reload
    if (Date.now() - lastReloadTime < localOptions.errorInterval * 1000) {
      player.trigger({ type: 'usage', name: 'hls-error-reload-canceled' });
      return;
    }

    if (typeof localOptions.getSource !== 'function') {
      videojs.log.error('ERROR: reloadSourceOnError - The option getSource must be a function!');
      return;
    }
    lastReloadTime = Date.now();

    return localOptions.getSource.call(player, setSource);
  };

  /**
   * Unbind any event handlers that were bound by the plugin
   *
   * @private
   */
  var cleanupEvents = function cleanupEvents() {
    player.off('loadedmetadata', loadedMetadataHandler);
    player.off('error', errorHandler);
    player.off('dispose', cleanupEvents);
  };

  /**
   * Cleanup before re-initializing the plugin
   *
   * @param {Object} [newOptions] an object with plugin options
   * @private
   */
  var reinitPlugin = function reinitPlugin(newOptions) {
    cleanupEvents();
    initPlugin(player, newOptions);
  };

  player.on('error', errorHandler);
  player.on('dispose', cleanupEvents);

  // Overwrite the plugin function so that we can correctly cleanup before
  // initializing the plugin
  player.reloadSourceOnError = reinitPlugin;
};
17828
17829/**
17830 * Reload the source when an error is detected as long as there
17831 * wasn't an error previously within the last 30 seconds
17832 *
17833 * @param {Object} [options] an object with plugin options
17834 */
var reloadSourceOnError = function reloadSourceOnError(options) {
  // `this` is the player the plugin is registered on.
  initPlugin(this, options);
};
17838
// Package version, inlined at build time from package.json.
var version = "1.13.4";
17840
17841/**
17842 * @file videojs-http-streaming.js
17843 *
17844 * The main file for the HLS project.
17845 * License: https://github.com/videojs/videojs-http-streaming/blob/master/LICENSE
17846 */
17847
// The publicly exported VHS namespace: core classes, crypto helpers,
// rendition-selection defaults, and a preconfigured XHR factory.
var Hls$1 = {
  PlaylistLoader: PlaylistLoader,
  Playlist: Playlist,
  Decrypter: Decrypter,
  AsyncStream: AsyncStream,
  decrypt: decrypt,
  utils: utils,

  // Default playlist selectors (users may override these).
  STANDARD_PLAYLIST_SELECTOR: lastBandwidthSelector,
  INITIAL_PLAYLIST_SELECTOR: lowestBitrateCompatibleVariantSelector,
  comparePlaylistBandwidth: comparePlaylistBandwidth,
  comparePlaylistResolution: comparePlaylistResolution,

  xhr: xhrFactory()
};
17863
// Define deprecation-warned getter/setters for the tunable config properties,
// proxying each one on the Hls namespace through to the shared Config object.
['GOAL_BUFFER_LENGTH', 'MAX_GOAL_BUFFER_LENGTH', 'GOAL_BUFFER_LENGTH_RATE', 'BUFFER_LOW_WATER_LINE', 'MAX_BUFFER_LOW_WATER_LINE', 'BUFFER_LOW_WATER_LINE_RATE', 'BANDWIDTH_VARIANCE'].forEach(function (configKey) {
  Object.defineProperty(Hls$1, configKey, {
    get: function get$$1() {
      videojs.log.warn('using Hls.' + configKey + ' is UNSAFE be sure you know what you are doing');
      return Config[configKey];
    },
    set: function set$$1(value) {
      videojs.log.warn('using Hls.' + configKey + ' is UNSAFE be sure you know what you are doing');

      // Reject anything that is not a non-negative number.
      if (typeof value !== 'number' || value < 0) {
        videojs.log.warn('value of Hls.' + configKey + ' must be greater than or equal to 0');
        return;
      }

      Config[configKey] = value;
    }
  });
});
17883
// localStorage key under which persisted VHS state is stored.
var LOCAL_STORAGE_KEY = 'videojs-vhs';
17885
/**
 * Map a source MIME type onto the simple stream type it represents.
 *
 * @param {string} type - a MIME type string such as 'application/x-mpegURL'
 * @return {string|null} 'hls', 'dash', or null when the type is unrecognized
 */
var simpleTypeFromSourceType = function simpleTypeFromSourceType(type) {
  if (/^(audio|video|application)\/(x-|vnd\.apple\.)?mpegurl/i.test(type)) {
    return 'hls';
  }

  if (/^application\/dash\+xml/i.test(type)) {
    return 'dash';
  }

  return null;
};
17901
/**
 * Updates the selectedIndex of the QualityLevelList when a mediachange happens in hls.
 *
 * @param {QualityLevelList} qualityLevels The QualityLevelList to update.
 * @param {PlaylistLoader} playlistLoader PlaylistLoader containing the new media info.
 * @function handleHlsMediaChange
 */
var handleHlsMediaChange = function handleHlsMediaChange(qualityLevels, playlistLoader) {
  var activeId = playlistLoader.media().id;
  var selectedIndex = -1;

  // Locate the quality level whose id matches the newly active playlist;
  // -1 remains when there is no match.
  for (var levelIndex = 0; levelIndex < qualityLevels.length; levelIndex++) {
    if (qualityLevels[levelIndex].id === activeId) {
      selectedIndex = levelIndex;
      break;
    }
  }

  qualityLevels.selectedIndex_ = selectedIndex;
  qualityLevels.trigger({
    selectedIndex: selectedIndex,
    type: 'change'
  });
};
17926
/**
 * Adds quality levels to list once playlist metadata is available
 *
 * @param {QualityLevelList} qualityLevels The QualityLevelList to attach events to.
 * @param {Object} hls Hls object to listen to for media events.
 * @function handleHlsLoadedMetadata
 */
var handleHlsLoadedMetadata = function handleHlsLoadedMetadata(qualityLevels, hls) {
  var representations = hls.representations();

  representations.forEach(function (rep) {
    qualityLevels.addQualityLevel(rep);
  });

  // Sync the selected index with the currently active playlist.
  handleHlsMediaChange(qualityLevels, hls.playlists);
};
17940
// HLS is a source handler, not a tech. Make sure attempts to use it
// as one do not cause exceptions. This intentionally only warns and never
// reports support (source handlers are probed through a different API).
Hls$1.canPlaySource = function () {
  return videojs.log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
};
17946
/**
 * Build the keySystems configuration for videojs-contrib-eme by combining the
 * user-provided key system options with audio/video content types (and an
 * optional pssh) derived from the current segment loaders and playlist.
 *
 * @param {Object} keySystemOptions - user supplied keySystems option
 * @param {SegmentLoader} mainSegmentLoader - loader for the main content
 * @param {SegmentLoader} audioSegmentLoader - loader for demuxed audio content
 * @return {Object} the merged key system configuration (or the falsy
 *         keySystemOptions value unchanged)
 */
var emeKeySystems = function emeKeySystems(keySystemOptions, mainSegmentLoader, audioSegmentLoader) {
  if (!keySystemOptions) {
    return keySystemOptions;
  }

  var videoMimeType = void 0;
  var audioMimeType = void 0;

  if (audioSegmentLoader.mimeType_) {
    // if there is a mimeType associated with the audioSegmentLoader, then the audio
    // and video mimeType and codec strings are already in the format we need to
    // pass with the other key systems
    videoMimeType = mainSegmentLoader.mimeType_;
    audioMimeType = audioSegmentLoader.mimeType_;
  } else {
    // if there is no audioSegmentLoader mimeType, then we have to create the
    // the audio and video mimeType/codec strings from information extrapolated
    // from the mainSegmentLoader mimeType (ex. 'video/mp4; codecs="mp4, avc1"' -->
    // 'video/mp4; codecs="avc1"' and 'audio/mp4; codecs="mp4"')
    var parsedMimeType = parseContentType(mainSegmentLoader.mimeType_);
    var codecs = parsedMimeType.parameters.codecs.split(',');

    var audioCodec = void 0;
    var videoCodec = void 0;

    for (var i = 0; i < codecs.length; i++) {
      var codec = codecs[i].trim();

      if (isAudioCodec(codec)) {
        audioCodec = codec;
      } else if (isVideoCodec(codec)) {
        videoCodec = codec;
      }
    }

    videoMimeType = parsedMimeType.type + '; codecs="' + videoCodec + '"';
    audioMimeType = parsedMimeType.type.replace('video', 'audio') + '; codecs="' + audioCodec + '"';
  }

  // upsert the content types based on the selected playlist
  var keySystemContentTypes = {};
  var videoPlaylist = mainSegmentLoader.playlist_;

  for (var keySystem in keySystemOptions) {
    keySystemContentTypes[keySystem] = {
      audioContentType: audioMimeType,
      videoContentType: videoMimeType
    };

    var protection = videoPlaylist.contentProtection;

    if (protection && protection[keySystem] && protection[keySystem].pssh) {
      keySystemContentTypes[keySystem].pssh = protection[keySystem].pssh;
    }

    // videojs-contrib-eme accepts the option of specifying: 'com.some.cdm': 'url'
    // so we need to prevent overwriting the URL entirely
    if (typeof keySystemOptions[keySystem] === 'string') {
      keySystemContentTypes[keySystem].url = keySystemOptions[keySystem];
    }
  }

  return videojs.mergeOptions(keySystemOptions, keySystemContentTypes);
};
18010
/**
 * Wire EME (DRM) key system options onto the player's current source when the
 * videojs-contrib-eme plugin is available. No-op otherwise.
 *
 * @param {HlsHandler} hlsHandler - the handler whose source to configure
 */
var setupEmeOptions = function setupEmeOptions(hlsHandler) {
  var mpc = hlsHandler.masterPlaylistController_;
  var player = videojs.players[hlsHandler.tech_.options_.playerId];

  if (!player.eme) {
    return;
  }

  var sourceOptions = emeKeySystems(hlsHandler.source_.keySystems, mpc.mainSegmentLoader_, mpc.audioSegmentLoader_);

  if (!sourceOptions) {
    return;
  }

  player.currentSource().keySystems = sourceOptions;

  // Works around https://bugs.chromium.org/p/chromium/issues/detail?id=895449
  // in non-IE11 browsers. In IE11 this is too early to initialize media keys
  if (!(videojs.browser.IE_VERSION === 11) && player.eme.initializeMediaKeys) {
    player.eme.initializeMediaKeys();
  }
};
18031
/**
 * Read persisted VHS state from localStorage.
 *
 * @return {Object|null} the parsed stored object, or null when localStorage is
 * unavailable, empty for our key, or holds unparseable JSON
 */
var getVhsLocalStorage = function getVhsLocalStorage() {
  // Use the shared `global/window` reference (window$1), consistent with the
  // rest of this file, so a bare `window` lookup cannot throw a
  // ReferenceError in non-browser environments.
  if (!window$1.localStorage) {
    return null;
  }

  var storedObject = window$1.localStorage.getItem(LOCAL_STORAGE_KEY);

  if (!storedObject) {
    return null;
  }

  try {
    return JSON.parse(storedObject);
  } catch (e) {
    // someone may have tampered with the value
    return null;
  }
};
18050
/**
 * Merge the given options into the persisted VHS state and write it back to
 * localStorage.
 *
 * @param {Object} options - key/value pairs to persist
 * @return {Object|boolean} the object that was stored, or false when
 * localStorage is unavailable or the write failed (e.g. quota exceeded)
 */
var updateVhsLocalStorage = function updateVhsLocalStorage(options) {
  // Use the shared `global/window` reference (window$1), consistent with the
  // rest of this file, so a bare `window` lookup cannot throw a
  // ReferenceError in non-browser environments.
  if (!window$1.localStorage) {
    return false;
  }

  var objectToStore = getVhsLocalStorage();

  objectToStore = objectToStore ? videojs.mergeOptions(objectToStore, options) : options;

  try {
    window$1.localStorage.setItem(LOCAL_STORAGE_KEY, JSON.stringify(objectToStore));
  } catch (e) {
    // Throws if storage is full (e.g., always on iOS 5+ Safari private mode, where
    // storage is set to 0).
    // https://developer.mozilla.org/en-US/docs/Web/API/Storage/setItem#Exceptions
    // No need to perform any operation.
    return false;
  }

  return objectToStore;
};
18072
/**
 * Whether the browser has built-in HLS support.
 */
Hls$1.supportsNativeHls = function () {
  var probeEl = document.createElement('video');

  // native HLS is definitely not supported if HTML5 video isn't
  if (!videojs.getTech('Html5').isSupported()) {
    return false;
  }

  // HLS manifests can go by many mime-types
  var hlsMimeTypes = [
    // Apple sanctioned
    'application/vnd.apple.mpegurl',
    // Apple sanctioned for backwards compatibility
    'audio/mpegurl',
    // Very common
    'audio/x-mpegurl',
    // Very common
    'application/x-mpegurl',
    // Included for completeness
    'video/x-mpegurl', 'video/mpegurl', 'application/mpegurl'];

  // Supported if any of the candidate types reports 'maybe' or 'probably'.
  for (var i = 0; i < hlsMimeTypes.length; i++) {
    if (/maybe|probably/i.test(probeEl.canPlayType(hlsMimeTypes[i]))) {
      return true;
    }
  }

  return false;
}();
18102
// Whether the browser can natively play MPEG-DASH content.
Hls$1.supportsNativeDash = function () {
  if (!videojs.getTech('Html5').isSupported()) {
    return false;
  }

  var answer = document.createElement('video').canPlayType('application/dash+xml');

  return /maybe|probably/i.test(answer);
}();
18111
// Report native playback support for a simple stream type ('hls' or 'dash');
// any other type is reported as unsupported.
Hls$1.supportsTypeNatively = function (type) {
  switch (type) {
    case 'hls':
      return Hls$1.supportsNativeHls;
    case 'dash':
      return Hls$1.supportsNativeDash;
    default:
      return false;
  }
};
18123
/**
 * HLS is a source handler, not a tech. Make sure attempts to use it
 * as one do not cause exceptions.
 */
Hls$1.isSupported = function () {
  // Intentionally only warns; returns the warning's (undefined) result.
  return videojs.log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
};
18131
// Base video.js Component class that HlsHandler extends below.
var Component = videojs.getComponent('Component');
18133
18134/**
18135 * The Hls Handler object, where we orchestrate all of the parts
18136 * of HLS to interact with video.js
18137 *
18138 * @class HlsHandler
18139 * @extends videojs.Component
 * @param {Object} source the source object
18141 * @param {Tech} tech the parent tech object
18142 * @param {Object} options optional and required options
18143 */
18144
var HlsHandler = function (_Component) {
  inherits(HlsHandler, _Component);

  /**
   * Set up back-compat references on the player (player.hls / player.vhs /
   * player.dash), apply the overrideNative option, and attach
   * fullscreen/seeking/error/play listeners. The heavy playback setup
   * (controllers, loaders) is deferred to src().
   *
   * @param {Object} source the source object
   * @param {Tech} tech the parent tech object
   * @param {Object} options optional and required options
   */
  function HlsHandler(source, tech, options) {
    classCallCheck(this, HlsHandler);

    // tech.player() is deprecated but setup a reference to HLS for
    // backwards-compatibility
    var _this = possibleConstructorReturn(this, (HlsHandler.__proto__ || Object.getPrototypeOf(HlsHandler)).call(this, tech, options.hls));

    if (tech.options_ && tech.options_.playerId) {
      var _player = videojs(tech.options_.playerId);

      if (!_player.hasOwnProperty('hls')) {
        // defined as a getter so the deprecation warning fires on access,
        // not at setup time
        Object.defineProperty(_player, 'hls', {
          get: function get$$1() {
            videojs.log.warn('player.hls is deprecated. Use player.tech().hls instead.');
            tech.trigger({ type: 'usage', name: 'hls-player-access' });
            return _this;
          },
          configurable: true
        });
      }

      // Set up a reference to the HlsHandler from player.vhs. This allows users to start
      // migrating from player.tech_.hls... to player.vhs... for API access. Although this
      // isn't the most appropriate form of reference for video.js (since all APIs should
      // be provided through core video.js), it is a common pattern for plugins, and vhs
      // will act accordingly.
      _player.vhs = _this;
      // deprecated, for backwards compatibility
      _player.dash = _this;

      _this.player_ = _player;
    }

    _this.tech_ = tech;
    _this.source_ = source;
    _this.stats = {};
    _this.ignoreNextSeekingEvent_ = false;
    _this.setOptions_();

    if (_this.options_.overrideNative && tech.overrideNativeAudioTracks && tech.overrideNativeVideoTracks) {
      tech.overrideNativeAudioTracks(true);
      tech.overrideNativeVideoTracks(true);
    } else if (_this.options_.overrideNative && (tech.featuresNativeVideoTracks || tech.featuresNativeAudioTracks)) {
      // overriding native HLS only works if audio tracks have been emulated
      // error early if we're misconfigured
      throw new Error('Overriding native HLS requires emulated tracks. ' + 'See https://git.io/vMpjB');
    }

    // listen for fullscreenchange events for this player so that we
    // can adjust our quality selection quickly
    _this.on(document, ['fullscreenchange', 'webkitfullscreenchange', 'mozfullscreenchange', 'MSFullscreenChange'], function (event) {
      // check all vendor-prefixed properties since browser support varies
      var fullscreenElement = document.fullscreenElement || document.webkitFullscreenElement || document.mozFullScreenElement || document.msFullscreenElement;

      if (fullscreenElement && fullscreenElement.contains(_this.tech_.el())) {
        _this.masterPlaylistController_.smoothQualityChange_();
      }
    });

    // NOTE(review): `this` inside these tech listeners appears to be bound
    // to the handler by Component#on — hence this.setCurrentTime below;
    // confirm against the video.js Component implementation.
    _this.on(_this.tech_, 'seeking', function () {
      // skip the synthetic seek generated by seek-to-live (see 'firstplay'
      // listener in src())
      if (this.ignoreNextSeekingEvent_) {
        this.ignoreNextSeekingEvent_ = false;
        return;
      }

      this.setCurrentTime(this.tech_.currentTime());
    });

    _this.on(_this.tech_, 'error', function () {
      if (this.masterPlaylistController_) {
        this.masterPlaylistController_.pauseLoading();
      }
    });

    _this.on(_this.tech_, 'play', _this.play);
    return _this;
  }

  createClass(HlsHandler, [{
    key: 'setOptions_',

    /**
     * Populate this.options_ from defaults, values persisted in local
     * storage (bandwidth/throughput), and options passed on the source
     * object via player.src(), in increasing order of precedence.
     */
    value: function setOptions_() {
      var _this2 = this;

      // defaults
      this.options_.withCredentials = this.options_.withCredentials || false;
      this.options_.handleManifestRedirects = this.options_.handleManifestRedirects || false;
      this.options_.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions === false ? false : true;
      this.options_.useDevicePixelRatio = this.options_.useDevicePixelRatio || false;
      this.options_.smoothQualityChange = this.options_.smoothQualityChange || false;
      this.options_.useBandwidthFromLocalStorage = typeof this.source_.useBandwidthFromLocalStorage !== 'undefined' ? this.source_.useBandwidthFromLocalStorage : this.options_.useBandwidthFromLocalStorage || false;
      this.options_.customTagParsers = this.options_.customTagParsers || [];
      this.options_.customTagMappers = this.options_.customTagMappers || [];
      this.options_.cacheEncryptionKeys = this.options_.cacheEncryptionKeys || false;

      if (typeof this.options_.blacklistDuration !== 'number') {
        // default blacklist duration: five minutes (in seconds)
        this.options_.blacklistDuration = 5 * 60;
      }

      if (typeof this.options_.bandwidth !== 'number') {
        if (this.options_.useBandwidthFromLocalStorage) {
          var storedObject = getVhsLocalStorage();

          if (storedObject && storedObject.bandwidth) {
            this.options_.bandwidth = storedObject.bandwidth;
            this.tech_.trigger({ type: 'usage', name: 'hls-bandwidth-from-local-storage' });
          }
          if (storedObject && storedObject.throughput) {
            this.options_.throughput = storedObject.throughput;
            this.tech_.trigger({ type: 'usage', name: 'hls-throughput-from-local-storage' });
          }
        }
      }
      // if bandwidth was not set by options or pulled from local storage, start playlist
      // selection at a reasonable bandwidth
      if (typeof this.options_.bandwidth !== 'number') {
        this.options_.bandwidth = Config.INITIAL_BANDWIDTH;
      }

      // If the bandwidth number is unchanged from the initial setting
      // then this takes precedence over the enableLowInitialPlaylist option
      this.options_.enableLowInitialPlaylist = this.options_.enableLowInitialPlaylist && this.options_.bandwidth === Config.INITIAL_BANDWIDTH;

      // grab options passed to player.src
      ['withCredentials', 'useDevicePixelRatio', 'limitRenditionByPlayerDimensions', 'bandwidth', 'smoothQualityChange', 'customTagParsers', 'customTagMappers', 'handleManifestRedirects', 'cacheEncryptionKeys'].forEach(function (option) {
        if (typeof _this2.source_[option] !== 'undefined') {
          _this2.options_[option] = _this2.source_[option];
        }
      });

      this.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions;
      this.useDevicePixelRatio = this.options_.useDevicePixelRatio;
    }
    /**
     * called when player.src gets called, handle a new source
     *
     * @param {Object} src the source object to handle
     */

  }, {
    key: 'src',
    value: function src(_src, type) {
      var _this3 = this;

      // do nothing if the src is falsey
      if (!_src) {
        return;
      }
      this.setOptions_();
      // add master playlist controller options
      this.options_.url = this.source_.src;
      this.options_.tech = this.tech_;
      this.options_.externHls = Hls$1;
      this.options_.sourceType = simpleTypeFromSourceType(type);
      // Whenever we seek internally, we should update the tech
      this.options_.seekTo = function (time) {
        _this3.tech_.setCurrentTime(time);
      };

      this.masterPlaylistController_ = new MasterPlaylistController(this.options_);
      this.playbackWatcher_ = new PlaybackWatcher(videojs.mergeOptions(this.options_, {
        seekable: function seekable$$1() {
          return _this3.seekable();
        },
        media: function media() {
          return _this3.masterPlaylistController_.media();
        }
      }));

      // surface controller errors through the player's error mechanism
      this.masterPlaylistController_.on('error', function () {
        var player = videojs.players[_this3.tech_.options_.playerId];

        player.error(_this3.masterPlaylistController_.error);
      });

      // `this` in selectPlaylist should be the HlsHandler for backwards
      // compatibility with < v2
      this.masterPlaylistController_.selectPlaylist = this.selectPlaylist ? this.selectPlaylist.bind(this) : Hls$1.STANDARD_PLAYLIST_SELECTOR.bind(this);

      this.masterPlaylistController_.selectInitialPlaylist = Hls$1.INITIAL_PLAYLIST_SELECTOR.bind(this);

      // re-expose some internal objects for backwards compatibility with < v2
      this.playlists = this.masterPlaylistController_.masterPlaylistLoader_;
      this.mediaSource = this.masterPlaylistController_.mediaSource;

      // Proxy assignment of some properties to the master playlist
      // controller. Using a custom property for backwards compatibility
      // with < v2
      Object.defineProperties(this, {
        selectPlaylist: {
          get: function get$$1() {
            return this.masterPlaylistController_.selectPlaylist;
          },
          set: function set$$1(selectPlaylist) {
            this.masterPlaylistController_.selectPlaylist = selectPlaylist.bind(this);
          }
        },
        throughput: {
          get: function get$$1() {
            return this.masterPlaylistController_.mainSegmentLoader_.throughput.rate;
          },
          set: function set$$1(throughput) {
            this.masterPlaylistController_.mainSegmentLoader_.throughput.rate = throughput;
            // By setting `count` to 1 the throughput value becomes the starting value
            // for the cumulative average
            this.masterPlaylistController_.mainSegmentLoader_.throughput.count = 1;
          }
        },
        bandwidth: {
          get: function get$$1() {
            return this.masterPlaylistController_.mainSegmentLoader_.bandwidth;
          },
          set: function set$$1(bandwidth) {
            this.masterPlaylistController_.mainSegmentLoader_.bandwidth = bandwidth;
            // setting the bandwidth manually resets the throughput counter
            // `count` is set to zero that current value of `rate` isn't included
            // in the cumulative average
            this.masterPlaylistController_.mainSegmentLoader_.throughput = {
              rate: 0,
              count: 0
            };
          }
        },
        /**
         * `systemBandwidth` is a combination of two serial processes bit-rates. The first
         * is the network bitrate provided by `bandwidth` and the second is the bitrate of
         * the entire process after that - decryption, transmuxing, and appending - provided
         * by `throughput`.
         *
         * Since the two process are serial, the overall system bandwidth is given by:
         * sysBandwidth = 1 / (1 / bandwidth + 1 / throughput)
         */
        systemBandwidth: {
          get: function get$$1() {
            // guard against division by zero when bandwidth is unset
            var invBandwidth = 1 / (this.bandwidth || 1);
            var invThroughput = void 0;

            if (this.throughput > 0) {
              invThroughput = 1 / this.throughput;
            } else {
              invThroughput = 0;
            }

            var systemBitrate = Math.floor(1 / (invBandwidth + invThroughput));

            return systemBitrate;
          },
          set: function set$$1() {
            videojs.log.error('The "systemBandwidth" property is read-only');
          }
        }
      });

      // seed the proxied properties from any configured starting values
      if (this.options_.bandwidth) {
        this.bandwidth = this.options_.bandwidth;
      }
      if (this.options_.throughput) {
        this.throughput = this.options_.throughput;
      }

      // expose live, read-only playback statistics on this.stats
      Object.defineProperties(this.stats, {
        bandwidth: {
          get: function get$$1() {
            return _this3.bandwidth || 0;
          },
          enumerable: true
        },
        mediaRequests: {
          get: function get$$1() {
            return _this3.masterPlaylistController_.mediaRequests_() || 0;
          },
          enumerable: true
        },
        mediaRequestsAborted: {
          get: function get$$1() {
            return _this3.masterPlaylistController_.mediaRequestsAborted_() || 0;
          },
          enumerable: true
        },
        mediaRequestsTimedout: {
          get: function get$$1() {
            return _this3.masterPlaylistController_.mediaRequestsTimedout_() || 0;
          },
          enumerable: true
        },
        mediaRequestsErrored: {
          get: function get$$1() {
            return _this3.masterPlaylistController_.mediaRequestsErrored_() || 0;
          },
          enumerable: true
        },
        mediaTransferDuration: {
          get: function get$$1() {
            return _this3.masterPlaylistController_.mediaTransferDuration_() || 0;
          },
          enumerable: true
        },
        mediaBytesTransferred: {
          get: function get$$1() {
            return _this3.masterPlaylistController_.mediaBytesTransferred_() || 0;
          },
          enumerable: true
        },
        mediaSecondsLoaded: {
          get: function get$$1() {
            return _this3.masterPlaylistController_.mediaSecondsLoaded_() || 0;
          },
          enumerable: true
        },
        buffered: {
          get: function get$$1() {
            return timeRangesToArray(_this3.tech_.buffered());
          },
          enumerable: true
        },
        currentTime: {
          get: function get$$1() {
            return _this3.tech_.currentTime();
          },
          enumerable: true
        },
        currentSource: {
          get: function get$$1() {
            return _this3.tech_.currentSource_;
          },
          enumerable: true
        },
        currentTech: {
          get: function get$$1() {
            return _this3.tech_.name_;
          },
          enumerable: true
        },
        duration: {
          get: function get$$1() {
            return _this3.tech_.duration();
          },
          enumerable: true
        },
        master: {
          get: function get$$1() {
            return _this3.playlists.master;
          },
          enumerable: true
        },
        playerDimensions: {
          get: function get$$1() {
            return _this3.tech_.currentDimensions();
          },
          enumerable: true
        },
        seekable: {
          get: function get$$1() {
            return timeRangesToArray(_this3.tech_.seekable());
          },
          enumerable: true
        },
        timestamp: {
          get: function get$$1() {
            return Date.now();
          },
          enumerable: true
        },
        videoPlaybackQuality: {
          get: function get$$1() {
            return _this3.tech_.getVideoPlaybackQuality();
          },
          enumerable: true
        }
      });

      this.tech_.one('canplay', this.masterPlaylistController_.setupFirstPlay.bind(this.masterPlaylistController_));

      // persist the latest measurements so future sessions can start with
      // a realistic bandwidth estimate (see setOptions_)
      this.tech_.on('bandwidthupdate', function () {
        if (_this3.options_.useBandwidthFromLocalStorage) {
          updateVhsLocalStorage({
            bandwidth: _this3.bandwidth,
            throughput: Math.round(_this3.throughput)
          });
        }
      });

      this.masterPlaylistController_.on('selectedinitialmedia', function () {
        // Add the manual rendition mix-in to HlsHandler
        renditionSelectionMixin(_this3);
        setupEmeOptions(_this3);
      });

      // the bandwidth of the primary segment loader is our best
      // estimate of overall bandwidth
      this.on(this.masterPlaylistController_, 'progress', function () {
        this.tech_.trigger('progress');
      });

      // In the live case, we need to ignore the very first `seeking` event since
      // that will be the result of the seek-to-live behavior
      this.on(this.masterPlaylistController_, 'firstplay', function () {
        this.ignoreNextSeekingEvent_ = true;
      });

      this.setupQualityLevels_();

      // do nothing if the tech has been disposed already
      // this can occur if someone sets the src in player.ready(), for instance
      if (!this.tech_.el()) {
        return;
      }

      this.tech_.src(videojs.URL.createObjectURL(this.masterPlaylistController_.mediaSource));
    }

    /**
     * Initializes the quality levels and sets listeners to update them.
     *
     * @method setupQualityLevels_
     * @private
     */

  }, {
    key: 'setupQualityLevels_',
    value: function setupQualityLevels_() {
      var _this4 = this;

      var player = videojs.players[this.tech_.options_.playerId];

      // if there isn't a player or there isn't a qualityLevels plugin
      // or qualityLevels_ listeners have already been setup, do nothing.
      if (!player || !player.qualityLevels || this.qualityLevels_) {
        return;
      }

      this.qualityLevels_ = player.qualityLevels();

      this.masterPlaylistController_.on('selectedinitialmedia', function () {
        handleHlsLoadedMetadata(_this4.qualityLevels_, _this4);
      });

      this.playlists.on('mediachange', function () {
        handleHlsMediaChange(_this4.qualityLevels_, _this4.playlists);
      });
    }

    /**
     * Begin playing the video.
     */

  }, {
    key: 'play',
    value: function play() {
      this.masterPlaylistController_.play();
    }

    /**
     * a wrapper around the function in MasterPlaylistController
     */

  }, {
    key: 'setCurrentTime',
    value: function setCurrentTime(currentTime) {
      this.masterPlaylistController_.setCurrentTime(currentTime);
    }

    /**
     * a wrapper around the function in MasterPlaylistController
     */

  }, {
    key: 'duration',
    value: function duration$$1() {
      return this.masterPlaylistController_.duration();
    }

    /**
     * a wrapper around the function in MasterPlaylistController
     */

  }, {
    key: 'seekable',
    value: function seekable$$1() {
      return this.masterPlaylistController_.seekable();
    }

    /**
     * Abort all outstanding work and cleanup.
     */

  }, {
    key: 'dispose',
    value: function dispose() {
      if (this.playbackWatcher_) {
        this.playbackWatcher_.dispose();
      }
      if (this.masterPlaylistController_) {
        this.masterPlaylistController_.dispose();
      }
      if (this.qualityLevels_) {
        this.qualityLevels_.dispose();
      }

      // remove the back-compat references installed by the constructor
      if (this.player_) {
        delete this.player_.vhs;
        delete this.player_.dash;
        delete this.player_.hls;
      }

      if (this.tech_ && this.tech_.hls) {
        delete this.tech_.hls;
      }

      get(HlsHandler.prototype.__proto__ || Object.getPrototypeOf(HlsHandler.prototype), 'dispose', this).call(this);
    }

    /**
     * Delegates to getProgramTime with the currently active media
     * playlist to translate a player time.
     *
     * @param {Number} time player time to convert
     * @param {Function} callback receives the result
     * @return {*} whatever getProgramTime returns
     */

  }, {
    key: 'convertToProgramTime',
    value: function convertToProgramTime(time, callback) {
      return getProgramTime({
        playlist: this.masterPlaylistController_.media(),
        time: time,
        callback: callback
      });
    }

    // the player must be playing before calling this

  }, {
    key: 'seekToProgramTime',
    value: function seekToProgramTime$$1(programTime, callback) {
      // optional args: pauseAfterSeek defaults to true, retryCount to 2
      var pauseAfterSeek = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : true;
      var retryCount = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : 2;

      return seekToProgramTime({
        programTime: programTime,
        playlist: this.masterPlaylistController_.media(),
        retryCount: retryCount,
        pauseAfterSeek: pauseAfterSeek,
        seekTo: this.options_.seekTo,
        tech: this.options_.tech,
        callback: callback
      });
    }
  }]);
  return HlsHandler;
}(Component);
18688
/**
 * The Source Handler object, which informs video.js what additional
 * MIME types are supported and sets up playback. It is registered
 * automatically to the appropriate tech based on the capabilities of
 * the browser it is running in. It is not necessary to use or modify
 * this object in normal usage.
 */


var HlsSourceHandler = {
  name: 'videojs-http-streaming',
  VERSION: version,
  canHandleSource: function canHandleSource(srcObj) {
    var overrides = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};

    // handler-specific options win over the global videojs defaults
    var merged = videojs.mergeOptions(videojs.options, overrides);

    return HlsSourceHandler.canPlayType(srcObj.type, merged);
  },
  handleSource: function handleSource(source, tech) {
    var overrides = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};

    var merged = videojs.mergeOptions(videojs.options, overrides);

    // expose the handler (and its xhr) on the tech before loading the
    // source, preserving the original setup order
    tech.hls = new HlsHandler(source, tech, merged);
    tech.hls.xhr = xhrFactory();

    tech.hls.src(source.src, source.type);
    return tech.hls;
  },
  canPlayType: function canPlayType(type) {
    var overrides = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};

    var merged = videojs.mergeOptions(videojs.options, overrides);
    var overrideNative = merged.hls.overrideNative;
    var supportedType = simpleTypeFromSourceType(type);

    // offer MSE playback only for recognized types the browser can't
    // (or was told not to) play natively
    var canUseMsePlayback = supportedType && (!Hls$1.supportsTypeNatively(supportedType) || overrideNative);

    return canUseMsePlayback ? 'maybe' : '';
  }
};
18731
// Fall back to the VHS-provided MediaSource/URL shims when the running
// video.js build does not already supply its own.
if (typeof videojs.MediaSource === 'undefined' || typeof videojs.URL === 'undefined') {
  videojs.MediaSource = MediaSource;
  videojs.URL = URL$1;
}

// register source handlers with the appropriate techs
if (MediaSource.supportsNativeMediaSources()) {
  videojs.getTech('Html5').registerSourceHandler(HlsSourceHandler, 0);
}

videojs.HlsHandler = HlsHandler;
videojs.HlsSourceHandler = HlsSourceHandler;
videojs.Hls = Hls$1;
// older video.js versions without middleware support (no videojs.use)
// expect Hls to be registered as a component
if (!videojs.use) {
  videojs.registerComponent('Hls', Hls$1);
}
// ensure an hls options bucket exists (canPlayType reads merged.hls.*)
videojs.options.hls = videojs.options.hls || {};

// registerPlugin replaced plugin in video.js 6; support both APIs
if (videojs.registerPlugin) {
  videojs.registerPlugin('reloadSourceOnError', reloadSourceOnError);
} else {
  videojs.plugin('reloadSourceOnError', reloadSourceOnError);
}

export { LOCAL_STORAGE_KEY, Hls$1 as Hls, HlsHandler, HlsSourceHandler, emeKeySystems, simpleTypeFromSourceType };