/*! @name @videojs/http-streaming @version 2.7.0 @license Apache-2.0 */
(function (global, factory) {
  typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('global/document'), require('global/window'), require('video.js'), require('xmldom')) :
  typeof define === 'function' && define.amd ? define(['exports', 'global/document', 'global/window', 'video.js', 'xmldom'], factory) :
  (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.httpStreaming = {}, global.document, global.window, global.videojs, global.window));
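  // Note: in this browser global build, `global.window` is also passed in the
  // `xmldom` argument position; the bundle appears to rely on the browser's
  // native `window.DOMParser` standing in for the xmldom package (an inference
  // from the argument order, not stated in the source).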
}(this, (function (exports, document, window, videojs, xmldom) { 'use strict';

  function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }

  var document__default = /*#__PURE__*/_interopDefaultLegacy(document);
  var window__default = /*#__PURE__*/_interopDefaultLegacy(window);
  var videojs__default = /*#__PURE__*/_interopDefaultLegacy(videojs);

  function createCommonjsModule(fn, basedir, module) {
    return module = {
      path: basedir,
      exports: {},
      require: function (path, base) {
        return commonjsRequire(path, (base === undefined || base === null) ? module.path : base);
      }
    }, fn(module, module.exports), module.exports;
  }

  function commonjsRequire () {
    throw new Error('Dynamic requires are not currently supported by @rollup/plugin-commonjs');
  }

  var assertThisInitialized = createCommonjsModule(function (module) {
    function _assertThisInitialized(self) {
      if (self === void 0) {
        throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
      }

      return self;
    }

    module.exports = _assertThisInitialized;
    module.exports["default"] = module.exports, module.exports.__esModule = true;
  });

  var setPrototypeOf = createCommonjsModule(function (module) {
    function _setPrototypeOf(o, p) {
      module.exports = _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) {
        o.__proto__ = p;
        return o;
      };

      module.exports["default"] = module.exports, module.exports.__esModule = true;
      return _setPrototypeOf(o, p);
    }

    module.exports = _setPrototypeOf;
    module.exports["default"] = module.exports, module.exports.__esModule = true;
  });

  var inheritsLoose = createCommonjsModule(function (module) {
    function _inheritsLoose(subClass, superClass) {
      subClass.prototype = Object.create(superClass.prototype);
      subClass.prototype.constructor = subClass;
      setPrototypeOf(subClass, superClass);
    }

    module.exports = _inheritsLoose;
    module.exports["default"] = module.exports, module.exports.__esModule = true;
  });

  var urlToolkit = createCommonjsModule(function (module, exports) {
    // see https://tools.ietf.org/html/rfc1808
    (function (root) {
      var URL_REGEX = /^((?:[a-zA-Z0-9+\-.]+:)?)(\/\/[^\/?#]*)?((?:[^\/?#]*\/)*[^;?#]*)?(;[^?#]*)?(\?[^#]*)?(#.*)?$/;
      var FIRST_SEGMENT_REGEX = /^([^\/?#]*)(.*)$/;
      var SLASH_DOT_REGEX = /(?:\/|^)\.(?=\/)/g;
      var SLASH_DOT_DOT_REGEX = /(?:\/|^)\.\.\/(?!\.\.\/)[^\/]*(?=\/)/g;
      var URLToolkit = {
        // If opts.alwaysNormalize is true then the path will always be normalized even when it starts with / or //
        // E.g
        // With opts.alwaysNormalize = false (default, spec compliant)
        // http://a.com/b/cd + /e/f/../g => http://a.com/e/f/../g
        // With opts.alwaysNormalize = true (not spec compliant)
        // http://a.com/b/cd + /e/f/../g => http://a.com/e/g
        buildAbsoluteURL: function buildAbsoluteURL(baseURL, relativeURL, opts) {
          opts = opts || {}; // remove any remaining space and CRLF

          baseURL = baseURL.trim();
          relativeURL = relativeURL.trim();

          if (!relativeURL) {
            // 2a) If the embedded URL is entirely empty, it inherits the
            // entire base URL (i.e., is set equal to the base URL)
            // and we are done.
            if (!opts.alwaysNormalize) {
              return baseURL;
            }

            var basePartsForNormalise = URLToolkit.parseURL(baseURL);

            if (!basePartsForNormalise) {
              throw new Error('Error trying to parse base URL.');
            }

            basePartsForNormalise.path = URLToolkit.normalizePath(basePartsForNormalise.path);
            return URLToolkit.buildURLFromParts(basePartsForNormalise);
          }

          var relativeParts = URLToolkit.parseURL(relativeURL);

          if (!relativeParts) {
            throw new Error('Error trying to parse relative URL.');
          }

          if (relativeParts.scheme) {
            // 2b) If the embedded URL starts with a scheme name, it is
            // interpreted as an absolute URL and we are done.
            if (!opts.alwaysNormalize) {
              return relativeURL;
            }

            relativeParts.path = URLToolkit.normalizePath(relativeParts.path);
            return URLToolkit.buildURLFromParts(relativeParts);
          }

          var baseParts = URLToolkit.parseURL(baseURL);

          if (!baseParts) {
            throw new Error('Error trying to parse base URL.');
          }

          if (!baseParts.netLoc && baseParts.path && baseParts.path[0] !== '/') {
            // If netLoc missing and path doesn't start with '/', assume everything before the first '/' is the netLoc
            // This causes 'example.com/a' to be handled as '//example.com/a' instead of '/example.com/a'
            var pathParts = FIRST_SEGMENT_REGEX.exec(baseParts.path);
            baseParts.netLoc = pathParts[1];
            baseParts.path = pathParts[2];
          }

          if (baseParts.netLoc && !baseParts.path) {
            baseParts.path = '/';
          }

          var builtParts = {
            // 2c) Otherwise, the embedded URL inherits the scheme of
            // the base URL.
            scheme: baseParts.scheme,
            netLoc: relativeParts.netLoc,
            path: null,
            params: relativeParts.params,
            query: relativeParts.query,
            fragment: relativeParts.fragment
          };

          if (!relativeParts.netLoc) {
            // 3) If the embedded URL's <net_loc> is non-empty, we skip to
            // Step 7. Otherwise, the embedded URL inherits the <net_loc>
            // (if any) of the base URL.
            builtParts.netLoc = baseParts.netLoc; // 4) If the embedded URL path is preceded by a slash "/", the
            // path is not relative and we skip to Step 7.

            if (relativeParts.path[0] !== '/') {
              if (!relativeParts.path) {
                // 5) If the embedded URL path is empty (and not preceded by a
                // slash), then the embedded URL inherits the base URL path
                builtParts.path = baseParts.path; // 5a) if the embedded URL's <params> is non-empty, we skip to
                // step 7; otherwise, it inherits the <params> of the base
                // URL (if any) and

                if (!relativeParts.params) {
                  builtParts.params = baseParts.params; // 5b) if the embedded URL's <query> is non-empty, we skip to
                  // step 7; otherwise, it inherits the <query> of the base
                  // URL (if any) and we skip to step 7.

                  if (!relativeParts.query) {
                    builtParts.query = baseParts.query;
                  }
                }
              } else {
                // 6) The last segment of the base URL's path (anything
                // following the rightmost slash "/", or the entire path if no
                // slash is present) is removed and the embedded URL's path is
                // appended in its place.
                var baseURLPath = baseParts.path;
                var newPath = baseURLPath.substring(0, baseURLPath.lastIndexOf('/') + 1) + relativeParts.path;
                builtParts.path = URLToolkit.normalizePath(newPath);
              }
            }
          }

          if (builtParts.path === null) {
            builtParts.path = opts.alwaysNormalize ? URLToolkit.normalizePath(relativeParts.path) : relativeParts.path;
          }

          return URLToolkit.buildURLFromParts(builtParts);
        },
        parseURL: function parseURL(url) {
          var parts = URL_REGEX.exec(url);

          if (!parts) {
            return null;
          }

          return {
            scheme: parts[1] || '',
            netLoc: parts[2] || '',
            path: parts[3] || '',
            params: parts[4] || '',
            query: parts[5] || '',
            fragment: parts[6] || ''
          };
        },
        normalizePath: function normalizePath(path) {
          // The following operations are
          // then applied, in order, to the new path:
          // 6a) All occurrences of "./", where "." is a complete path
          // segment, are removed.
          // 6b) If the path ends with "." as a complete path segment,
          // that "." is removed.
          path = path.split('').reverse().join('').replace(SLASH_DOT_REGEX, ''); // 6c) All occurrences of "<segment>/../", where <segment> is a
          // complete path segment not equal to "..", are removed.
          // Removal of these path segments is performed iteratively,
          // removing the leftmost matching pattern on each iteration,
          // until no matching pattern remains.
          // 6d) If the path ends with "<segment>/..", where <segment> is a
          // complete path segment not equal to "..", that
          // "<segment>/.." is removed.

          while (path.length !== (path = path.replace(SLASH_DOT_DOT_REGEX, '')).length) {}

          return path.split('').reverse().join('');
        },
        buildURLFromParts: function buildURLFromParts(parts) {
          return parts.scheme + parts.netLoc + parts.path + parts.params + parts.query + parts.fragment;
        }
      };
      module.exports = URLToolkit;
    })();
  });

  var resolveUrl$1 = function resolveUrl(baseUrl, relativeUrl) {
    // return early if we don't need to resolve
    if (/^[a-z]+:/i.test(relativeUrl)) {
      return relativeUrl;
    } // if the base URL is relative then combine with the current location


    if (!/\/\//i.test(baseUrl)) {
      baseUrl = urlToolkit.buildAbsoluteURL(window__default['default'].location && window__default['default'].location.href || '', baseUrl);
    }

    return urlToolkit.buildAbsoluteURL(baseUrl, relativeUrl);
  };
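
  // Illustrative usage of resolveUrl (hypothetical URLs, not part of the
  // original source): relative segment URIs from a media playlist are
  // resolved against the playlist's own absolute URL.
  //
  //   resolveUrl$1('https://cdn.example.com/hls/main.m3u8', 'segment-1.ts');
  //   // => 'https://cdn.example.com/hls/segment-1.ts'
  //   resolveUrl$1('https://cdn.example.com/hls/main.m3u8', 'https://other.example.com/a.ts');
  //   // => 'https://other.example.com/a.ts' (already absolute, returned as-is)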

  /**
   * @file resolve-url.js - Handling how URLs are resolved and manipulated
   */
  var resolveUrl = resolveUrl$1;
  /**
   * Checks whether an xhr request was redirected and returns the correct url depending
   * on the `handleManifestRedirects` option
   *
   * @api private
   *
   * @param {boolean} handleManifestRedirect - whether redirected URLs should be used
   * @param {string} url - the URL being requested
   * @param {XMLHttpRequest} req - the xhr request result
   *
   * @return {string}
   */

  var resolveManifestRedirect = function resolveManifestRedirect(handleManifestRedirect, url, req) {
    // To understand how the responseURL below is set and generated:
    // - https://fetch.spec.whatwg.org/#concept-response-url
    // - https://fetch.spec.whatwg.org/#atomic-http-redirect-handling
    if (handleManifestRedirect && req && req.responseURL && url !== req.responseURL) {
      return req.responseURL;
    }

    return url;
  };
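
  // Illustrative example (hypothetical values): when redirect handling is
  // enabled and the xhr landed on a different URL, the redirected URL wins.
  //
  //   resolveManifestRedirect(true, 'https://a.example/main.m3u8',
  //     { responseURL: 'https://b.example/main.m3u8' });
  //   // => 'https://b.example/main.m3u8'
  //   resolveManifestRedirect(false, 'https://a.example/main.m3u8',
  //     { responseURL: 'https://b.example/main.m3u8' });
  //   // => 'https://a.example/main.m3u8'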

  var logger = function logger(source) {
    if (videojs__default['default'].log.debug) {
      return videojs__default['default'].log.debug.bind(videojs__default['default'], 'VHS:', source + " >");
    }

    return function () {};
  };

  /**
   * @file stream.js
   */

  /**
   * A lightweight readable stream implementation that handles event dispatching.
   *
   * @class Stream
   */
  var Stream$1 = /*#__PURE__*/function () {
    function Stream() {
      this.listeners = {};
    }
    /**
     * Add a listener for a specified event type.
     *
     * @param {string} type the event name
     * @param {Function} listener the callback to be invoked when an event of
     * the specified type occurs
     */


    var _proto = Stream.prototype;

    _proto.on = function on(type, listener) {
      if (!this.listeners[type]) {
        this.listeners[type] = [];
      }

      this.listeners[type].push(listener);
    }
    /**
     * Remove a listener for a specified event type.
     *
     * @param {string} type the event name
     * @param {Function} listener a function previously registered for this
     * type of event through `on`
     * @return {boolean} if we could turn it off or not
     */
    ;

    _proto.off = function off(type, listener) {
      if (!this.listeners[type]) {
        return false;
      }

      var index = this.listeners[type].indexOf(listener); // TODO: which is better?
      // In Video.js we slice listener functions
      // on trigger so that it does not mess up the order
      // while we loop through.
      //
      // Here we slice on off so that the loop in trigger
      // can continue using its old reference to loop without
      // messing up the order.

      this.listeners[type] = this.listeners[type].slice(0);
      this.listeners[type].splice(index, 1);
      return index > -1;
    }
    /**
     * Trigger an event of the specified type on this stream. Any additional
     * arguments to this function are passed as parameters to event listeners.
     *
     * @param {string} type the event name
     */
    ;

    _proto.trigger = function trigger(type) {
      var callbacks = this.listeners[type];

      if (!callbacks) {
        return;
      } // Slicing the arguments on every invocation of this method
      // can add a significant amount of overhead. Avoid the
      // intermediate object creation for the common case of a
      // single callback argument


      if (arguments.length === 2) {
        var length = callbacks.length;

        for (var i = 0; i < length; ++i) {
          callbacks[i].call(this, arguments[1]);
        }
      } else {
        var args = Array.prototype.slice.call(arguments, 1);
        var _length = callbacks.length;

        for (var _i = 0; _i < _length; ++_i) {
          callbacks[_i].apply(this, args);
        }
      }
    }
    /**
     * Destroys the stream and cleans up.
     */
    ;

    _proto.dispose = function dispose() {
      this.listeners = {};
    }
    /**
     * Forwards all `data` events on this stream to the destination stream. The
     * destination stream should provide a method `push` to receive the data
     * events as they arrive.
     *
     * @param {Stream} destination the stream that will receive all `data` events
     * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
     */
    ;

    _proto.pipe = function pipe(destination) {
      this.on('data', function (data) {
        destination.push(data);
      });
    };

    return Stream;
  }();
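
  // Illustrative usage of the Stream event dispatcher (hypothetical handler,
  // not part of the original source):
  //
  //   var s = new Stream$1();
  //   var handler = function (line) { console.log('got', line); };
  //   s.on('data', handler);
  //   s.trigger('data', 'hello'); // logs: got hello
  //   s.off('data', handler);     // => true (the listener was removed)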

  var _extends_1 = createCommonjsModule(function (module) {
    function _extends() {
      module.exports = _extends = Object.assign || function (target) {
        for (var i = 1; i < arguments.length; i++) {
          var source = arguments[i];

          for (var key in source) {
            if (Object.prototype.hasOwnProperty.call(source, key)) {
              target[key] = source[key];
            }
          }
        }

        return target;
      };

      module.exports["default"] = module.exports, module.exports.__esModule = true;
      return _extends.apply(this, arguments);
    }

    module.exports = _extends;
    module.exports["default"] = module.exports, module.exports.__esModule = true;
  });

  var atob = function atob(s) {
    return window__default['default'].atob ? window__default['default'].atob(s) : Buffer.from(s, 'base64').toString('binary');
  };

  function decodeB64ToUint8Array(b64Text) {
    var decodedString = atob(b64Text);
    var array = new Uint8Array(decodedString.length);

    for (var i = 0; i < decodedString.length; i++) {
      array[i] = decodedString.charCodeAt(i);
    }

    return array;
  }
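
  // Illustrative example (hypothetical input): base64 "AQID" decodes to the
  // bytes 1, 2, 3.
  //
  //   decodeB64ToUint8Array('AQID'); // => Uint8Array [1, 2, 3]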

  /*! @name m3u8-parser @version 4.6.0 @license Apache-2.0 */
  /**
   * A stream that buffers string input and generates a `data` event for each
   * line.
   *
   * @class LineStream
   * @extends Stream
   */

  var LineStream = /*#__PURE__*/function (_Stream) {
    inheritsLoose(LineStream, _Stream);

    function LineStream() {
      var _this;

      _this = _Stream.call(this) || this;
      _this.buffer = '';
      return _this;
    }
    /**
     * Add new data to be parsed.
     *
     * @param {string} data the text to process
     */


    var _proto = LineStream.prototype;

    _proto.push = function push(data) {
      var nextNewline;
      this.buffer += data;
      nextNewline = this.buffer.indexOf('\n');

      for (; nextNewline > -1; nextNewline = this.buffer.indexOf('\n')) {
        this.trigger('data', this.buffer.substring(0, nextNewline));
        this.buffer = this.buffer.substring(nextNewline + 1);
      }
    };

    return LineStream;
  }(Stream$1);
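
  // Illustrative usage (hypothetical input): partial chunks are buffered
  // until a newline completes a line.
  //
  //   var lines = new LineStream();
  //   lines.on('data', function (line) { console.log(line); });
  //   lines.push('#EXTM3U\n#EXT');         // logs: #EXTM3U
  //   lines.push('INF:10,\nsegment.ts\n'); // logs: #EXTINF:10,  then: segment.ts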

  var TAB = String.fromCharCode(0x09);

  var parseByterange = function parseByterange(byterangeString) {
    // optionally match and capture 0+ digits before `@`
    // optionally match and capture 0+ digits after `@`
    var match = /([0-9.]*)?@?([0-9.]*)?/.exec(byterangeString || '');
    var result = {};

    if (match[1]) {
      result.length = parseInt(match[1], 10);
    }

    if (match[2]) {
      result.offset = parseInt(match[2], 10);
    }

    return result;
  };
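
  // Illustrative example: an HLS byterange string is "<length>[@<offset>]".
  //
  //   parseByterange('522828@0'); // => { length: 522828, offset: 0 }
  //   parseByterange('587500');   // => { length: 587500 }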
  /**
   * "forgiving" attribute list pseudo-grammar:
   * attributes -> keyvalue (',' keyvalue)*
   * keyvalue -> key '=' value
   * key -> [^=]*
   * value -> '"' [^"]* '"' | [^,]*
   */


  var attributeSeparator = function attributeSeparator() {
    var key = '[^=]*';
    var value = '"[^"]*"|[^,]*';
    var keyvalue = '(?:' + key + ')=(?:' + value + ')';
    return new RegExp('(?:^|,)(' + keyvalue + ')');
  };
  /**
   * Parse attributes from a line given the separator
   *
   * @param {string} attributes the attribute line to parse
   */


  var parseAttributes$1 = function parseAttributes(attributes) {
    // split the string using attributes as the separator
    var attrs = attributes.split(attributeSeparator());
    var result = {};
    var i = attrs.length;
    var attr;

    while (i--) {
      // filter out unmatched portions of the string
      if (attrs[i] === '') {
        continue;
      } // split the key and value


      attr = /([^=]*)=(.*)/.exec(attrs[i]).slice(1); // trim whitespace and remove optional quotes around the value

      attr[0] = attr[0].replace(/^\s+|\s+$/g, '');
      attr[1] = attr[1].replace(/^\s+|\s+$/g, '');
      attr[1] = attr[1].replace(/^['"](.*)['"]$/g, '$1');
      result[attr[0]] = attr[1];
    }

    return result;
  };
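
  // Illustrative example (hypothetical attribute line): quoted values keep
  // embedded commas, unquoted values are split on commas. Note values are
  // returned as strings; numeric coercion happens in the tag handlers below.
  //
  //   parseAttributes$1('BANDWIDTH=800000,CODECS="avc1.4d401e,mp4a.40.2"');
  //   // => { BANDWIDTH: '800000', CODECS: 'avc1.4d401e,mp4a.40.2' }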
  /**
   * A line-level M3U8 parser event stream. It expects to receive input one
   * line at a time and performs a context-free parse of its contents. A stream
   * interpretation of a manifest can be useful if the manifest is expected to
   * be too large to fit comfortably into memory or the entirety of the input
   * is not immediately available. Otherwise, it's probably much easier to work
   * with a regular `Parser` object.
   *
   * Produces `data` events with an object that captures the parser's
   * interpretation of the input. That object has a property `type` that is one
   * of `uri`, `comment`, or `tag`. URIs only have a single additional
   * property, `uri`, which captures the entirety of the input without
   * interpretation. Comments similarly have a single additional property
   * `text` which is the input without the leading `#`.
   *
   * Tags have a property `tagType` which is the lower-cased version of
   * the M3U8 directive without the `#EXT` or `#EXT-X-` prefix. For instance,
   * `#EXT-X-MEDIA-SEQUENCE` becomes `media-sequence` when parsed. Unrecognized
   * tags produce a `tag` event with no `tagType` and a single additional
   * property `data` with the remainder of the input.
   *
   * @class ParseStream
   * @extends Stream
   */


  var ParseStream = /*#__PURE__*/function (_Stream) {
    inheritsLoose(ParseStream, _Stream);

    function ParseStream() {
      var _this;

      _this = _Stream.call(this) || this;
      _this.customParsers = [];
      _this.tagMappers = [];
      return _this;
    }
    /**
     * Parses an additional line of input.
     *
     * @param {string} line a single line of an M3U8 file to parse
     */


    var _proto = ParseStream.prototype;

    _proto.push = function push(line) {
      var _this2 = this;

      var match;
      var event; // strip whitespace

      line = line.trim();

      if (line.length === 0) {
        // ignore empty lines
        return;
      } // URIs


      if (line[0] !== '#') {
        this.trigger('data', {
          type: 'uri',
          uri: line
        });
        return;
      } // map tags


      var newLines = this.tagMappers.reduce(function (acc, mapper) {
        var mappedLine = mapper(line); // skip if unchanged

        if (mappedLine === line) {
          return acc;
        }

        return acc.concat([mappedLine]);
      }, [line]);
      newLines.forEach(function (newLine) {
        for (var i = 0; i < _this2.customParsers.length; i++) {
          if (_this2.customParsers[i].call(_this2, newLine)) {
            return;
          }
        } // Comments


        if (newLine.indexOf('#EXT') !== 0) {
          _this2.trigger('data', {
            type: 'comment',
            text: newLine.slice(1)
          });

          return;
        } // strip off any carriage returns here so the regex matching
        // doesn't have to account for them.


        newLine = newLine.replace('\r', ''); // Tags

        match = /^#EXTM3U/.exec(newLine);

        if (match) {
          _this2.trigger('data', {
            type: 'tag',
            tagType: 'm3u'
          });

          return;
        }

        match = /^#EXTINF:?([0-9\.]*)?,?(.*)?$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'inf'
          };

          if (match[1]) {
            event.duration = parseFloat(match[1]);
          }

          if (match[2]) {
            event.title = match[2];
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-TARGETDURATION:?([0-9.]*)?/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'targetduration'
          };

          if (match[1]) {
            event.duration = parseInt(match[1], 10);
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-VERSION:?([0-9.]*)?/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'version'
          };

          if (match[1]) {
            event.version = parseInt(match[1], 10);
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-MEDIA-SEQUENCE:?(\-?[0-9.]*)?/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'media-sequence'
          };

          if (match[1]) {
            event.number = parseInt(match[1], 10);
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-DISCONTINUITY-SEQUENCE:?(\-?[0-9.]*)?/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'discontinuity-sequence'
          };

          if (match[1]) {
            event.number = parseInt(match[1], 10);
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-PLAYLIST-TYPE:?(.*)?$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'playlist-type'
          };

          if (match[1]) {
            event.playlistType = match[1];
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-BYTERANGE:?(.*)?$/.exec(newLine);

        if (match) {
          event = _extends_1(parseByterange(match[1]), {
            type: 'tag',
            tagType: 'byterange'
          });

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-ALLOW-CACHE:?(YES|NO)?/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'allow-cache'
          };

          if (match[1]) {
            event.allowed = !/NO/.test(match[1]);
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-MAP:?(.*)$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'map'
          };

          if (match[1]) {
            var attributes = parseAttributes$1(match[1]);

            if (attributes.URI) {
              event.uri = attributes.URI;
            }

            if (attributes.BYTERANGE) {
              event.byterange = parseByterange(attributes.BYTERANGE);
            }
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-STREAM-INF:?(.*)$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'stream-inf'
          };

          if (match[1]) {
            event.attributes = parseAttributes$1(match[1]);

            if (event.attributes.RESOLUTION) {
              var split = event.attributes.RESOLUTION.split('x');
              var resolution = {};

              if (split[0]) {
                resolution.width = parseInt(split[0], 10);
              }

              if (split[1]) {
                resolution.height = parseInt(split[1], 10);
              }

              event.attributes.RESOLUTION = resolution;
            }

            if (event.attributes.BANDWIDTH) {
              event.attributes.BANDWIDTH = parseInt(event.attributes.BANDWIDTH, 10);
            }

            if (event.attributes['PROGRAM-ID']) {
              event.attributes['PROGRAM-ID'] = parseInt(event.attributes['PROGRAM-ID'], 10);
            }
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-MEDIA:?(.*)$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'media'
          };

          if (match[1]) {
            event.attributes = parseAttributes$1(match[1]);
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-ENDLIST/.exec(newLine);

        if (match) {
          _this2.trigger('data', {
            type: 'tag',
            tagType: 'endlist'
          });

          return;
        }

        match = /^#EXT-X-DISCONTINUITY/.exec(newLine);

        if (match) {
          _this2.trigger('data', {
            type: 'tag',
            tagType: 'discontinuity'
          });

          return;
        }

        match = /^#EXT-X-PROGRAM-DATE-TIME:?(.*)$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'program-date-time'
          };

          if (match[1]) {
            event.dateTimeString = match[1];
            event.dateTimeObject = new Date(match[1]);
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-KEY:?(.*)$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'key'
          };

          if (match[1]) {
            event.attributes = parseAttributes$1(match[1]); // parse the IV string into a Uint32Array

            if (event.attributes.IV) {
              if (event.attributes.IV.substring(0, 2).toLowerCase() === '0x') {
                event.attributes.IV = event.attributes.IV.substring(2);
              }

              event.attributes.IV = event.attributes.IV.match(/.{8}/g);
              event.attributes.IV[0] = parseInt(event.attributes.IV[0], 16);
              event.attributes.IV[1] = parseInt(event.attributes.IV[1], 16);
              event.attributes.IV[2] = parseInt(event.attributes.IV[2], 16);
              event.attributes.IV[3] = parseInt(event.attributes.IV[3], 16);
              event.attributes.IV = new Uint32Array(event.attributes.IV);
            }
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-START:?(.*)$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'start'
          };

          if (match[1]) {
            event.attributes = parseAttributes$1(match[1]);
            event.attributes['TIME-OFFSET'] = parseFloat(event.attributes['TIME-OFFSET']);
            event.attributes.PRECISE = /YES/.test(event.attributes.PRECISE);
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-CUE-OUT-CONT:?(.*)?$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'cue-out-cont'
          };

          if (match[1]) {
            event.data = match[1];
          } else {
            event.data = '';
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-CUE-OUT:?(.*)?$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'cue-out'
          };

          if (match[1]) {
            event.data = match[1];
          } else {
            event.data = '';
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-CUE-IN:?(.*)?$/.exec(newLine);

        if (match) {
          event = {
            type: 'tag',
            tagType: 'cue-in'
          };

          if (match[1]) {
            event.data = match[1];
          } else {
            event.data = '';
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-SKIP:(.*)$/.exec(newLine);

        if (match && match[1]) {
          event = {
            type: 'tag',
            tagType: 'skip'
          };
          event.attributes = parseAttributes$1(match[1]);

          if (event.attributes.hasOwnProperty('SKIPPED-SEGMENTS')) {
            event.attributes['SKIPPED-SEGMENTS'] = parseInt(event.attributes['SKIPPED-SEGMENTS'], 10);
          }

          if (event.attributes.hasOwnProperty('RECENTLY-REMOVED-DATERANGES')) {
            event.attributes['RECENTLY-REMOVED-DATERANGES'] = event.attributes['RECENTLY-REMOVED-DATERANGES'].split(TAB);
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-PART:(.*)$/.exec(newLine);

        if (match && match[1]) {
          event = {
            type: 'tag',
            tagType: 'part'
          };
          event.attributes = parseAttributes$1(match[1]);
          ['DURATION'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = parseFloat(event.attributes[key]);
            }
          });
          ['INDEPENDENT', 'GAP'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = /YES/.test(event.attributes[key]);
            }
          });

          if (event.attributes.hasOwnProperty('BYTERANGE')) {
            event.attributes.byterange = parseByterange(event.attributes.BYTERANGE);
          }

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-SERVER-CONTROL:(.*)$/.exec(newLine);

        if (match && match[1]) {
          event = {
            type: 'tag',
            tagType: 'server-control'
          };
          event.attributes = parseAttributes$1(match[1]);
          ['CAN-SKIP-UNTIL', 'PART-HOLD-BACK', 'HOLD-BACK'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = parseFloat(event.attributes[key]);
            }
          });
          ['CAN-SKIP-DATERANGES', 'CAN-BLOCK-RELOAD'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = /YES/.test(event.attributes[key]);
            }
          });

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-PART-INF:(.*)$/.exec(newLine);

        if (match && match[1]) {
          event = {
            type: 'tag',
            tagType: 'part-inf'
          };
          event.attributes = parseAttributes$1(match[1]);
          ['PART-TARGET'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = parseFloat(event.attributes[key]);
            }
          });

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-PRELOAD-HINT:(.*)$/.exec(newLine);

        if (match && match[1]) {
          event = {
            type: 'tag',
            tagType: 'preload-hint'
          };
          event.attributes = parseAttributes$1(match[1]);
          ['BYTERANGE-START', 'BYTERANGE-LENGTH'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = parseInt(event.attributes[key], 10);
              var subkey = key === 'BYTERANGE-LENGTH' ? 'length' : 'offset';
              event.attributes.byterange = event.attributes.byterange || {};
              event.attributes.byterange[subkey] = event.attributes[key]; // only keep the parsed byterange object.

              delete event.attributes[key];
            }
          });

          _this2.trigger('data', event);

          return;
        }

        match = /^#EXT-X-RENDITION-REPORT:(.*)$/.exec(newLine);

        if (match && match[1]) {
          event = {
            type: 'tag',
            tagType: 'rendition-report'
          };
          event.attributes = parseAttributes$1(match[1]);
          ['LAST-MSN', 'LAST-PART'].forEach(function (key) {
            if (event.attributes.hasOwnProperty(key)) {
              event.attributes[key] = parseInt(event.attributes[key], 10);
            }
          });

          _this2.trigger('data', event);

          return;
        } // unknown tag type


        _this2.trigger('data', {
          type: 'tag',
          data: newLine.slice(4)
        });
      });
    }
    /**
     * Add a parser for custom headers
     *
     * @param {Object} options a map of options for the added parser
     * @param {RegExp} options.expression a regular expression to match the custom header
     * @param {string} options.customType the custom type to register to the output
     * @param {Function} [options.dataParser] function to parse the line into an object
     * @param {boolean} [options.segment] should tag data be attached to the segment object
     */
    ;

    _proto.addParser = function addParser(_ref) {
      var _this3 = this;

      var expression = _ref.expression,
          customType = _ref.customType,
          dataParser = _ref.dataParser,
          segment = _ref.segment;

      if (typeof dataParser !== 'function') {
        dataParser = function dataParser(line) {
          return line;
        };
      }

      this.customParsers.push(function (line) {
        var match = expression.exec(line);

        if (match) {
          _this3.trigger('data', {
            type: 'custom',
            data: dataParser(line),
            customType: customType,
            segment: segment
          });

          return true;
        }
      });
    }
    /**
     * Add a custom header mapper
     *
     * @param {Object} options
     * @param {RegExp} options.expression a regular expression to match the custom header
     * @param {Function} options.map function to translate tag into a different tag
     */
    ;

    _proto.addTagMapper = function addTagMapper(_ref2) {
      var expression = _ref2.expression,
          map = _ref2.map;

      var mapFn = function mapFn(line) {
        if (expression.test(line)) {
          return map(line);
        }

        return line;
      };

      this.tagMappers.push(mapFn);
    };

    return ParseStream;
  }(Stream$1);
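
  // Illustrative usage (hypothetical custom tag): a tag mapper rewrites a
  // vendor tag into a standard one before parsing.
  //
  //   var ps = new ParseStream();
  //   ps.on('data', function (entry) { console.log(entry); });
  //   ps.addTagMapper({
  //     expression: /^#EXAMPLE-VOD/,
  //     map: function () { return '#EXT-X-PLAYLIST-TYPE:VOD'; }
  //   });
  //   ps.push('#EXAMPLE-VOD');
  //   // emits a `comment` event for the original line (it lacks the #EXT
  //   // prefix) and a `tag`/`playlist-type` event for the mapped line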

  var camelCase = function camelCase(str) {
    return str.toLowerCase().replace(/-(\w)/g, function (a) {
      return a[1].toUpperCase();
    });
  };

  var camelCaseKeys = function camelCaseKeys(attributes) {
    var result = {};
    Object.keys(attributes).forEach(function (key) {
      result[camelCase(key)] = attributes[key];
    });
    return result;
  }; // set SERVER-CONTROL hold back based upon targetDuration and partTargetDuration
  // we need this helper because defaults are based upon targetDuration and
  // partTargetDuration being set, but they may not be if SERVER-CONTROL appears before
  // target durations are set.


  var setHoldBack = function setHoldBack(manifest) {
    var serverControl = manifest.serverControl,
        targetDuration = manifest.targetDuration,
        partTargetDuration = manifest.partTargetDuration;

    if (!serverControl) {
      return;
    }

    var tag = '#EXT-X-SERVER-CONTROL';
    var hb = 'holdBack';
    var phb = 'partHoldBack';
    var minTargetDuration = targetDuration && targetDuration * 3;
    var minPartDuration = partTargetDuration && partTargetDuration * 2;

    if (targetDuration && !serverControl.hasOwnProperty(hb)) {
      serverControl[hb] = minTargetDuration;
      this.trigger('info', {
        message: tag + " defaulting HOLD-BACK to targetDuration * 3 (" + minTargetDuration + ")."
      });
    }

    if (minTargetDuration && serverControl[hb] < minTargetDuration) {
      this.trigger('warn', {
        message: tag + " clamping HOLD-BACK (" + serverControl[hb] + ") to targetDuration * 3 (" + minTargetDuration + ")"
      });
      serverControl[hb] = minTargetDuration;
    } // default a missing part hold back to part target duration * 3


    if (partTargetDuration && !serverControl.hasOwnProperty(phb)) {
      serverControl[phb] = partTargetDuration * 3;
      this.trigger('info', {
        message: tag + " defaulting PART-HOLD-BACK to partTargetDuration * 3 (" + serverControl[phb] + ")."
      });
    } // if part hold back is too small, clamp it to part target duration * 2


    if (partTargetDuration && serverControl[phb] < minPartDuration) {
      this.trigger('warn', {
        message: tag + " clamping PART-HOLD-BACK (" + serverControl[phb] + ") to partTargetDuration * 2 (" + minPartDuration + ")."
      });
      serverControl[phb] = minPartDuration;
    }
  };
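
  // Worked example (hypothetical durations): with targetDuration = 6 and
  // partTargetDuration = 1, a playlist that omits HOLD-BACK/PART-HOLD-BACK
  // ends up with holdBack = 18 (6 * 3) and partHoldBack = 3 (1 * 3), and any
  // declared partHoldBack below 2 (1 * 2) would be clamped up to 2.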
  /**
   * A parser for M3U8 files. The current interpretation of the input is
   * exposed as a property `manifest` on parser objects. It's just two lines to
   * create and parse a manifest once you have the contents available as a string:
   *
   * ```js
   * var parser = new m3u8.Parser();
   * parser.push(xhr.responseText);
   * ```
   *
   * New input can later be applied to update the manifest object by calling
   * `push` again.
   *
   * The parser attempts to create a usable manifest object even if the
   * underlying input is somewhat nonsensical. It emits `info` and `warn`
   * events during the parse if it encounters input that seems invalid or
   * requires some property of the manifest object to be defaulted.
   *
   * @class Parser
   * @extends Stream
   */


  var Parser = /*#__PURE__*/function (_Stream) {
    inheritsLoose(Parser, _Stream);

    function Parser() {
      var _this;

      _this = _Stream.call(this) || this;
      _this.lineStream = new LineStream();
      _this.parseStream = new ParseStream();

      _this.lineStream.pipe(_this.parseStream);
      /* eslint-disable consistent-this */


      var self = assertThisInitialized(_this);
      /* eslint-enable consistent-this */


      var uris = [];
      var currentUri = {}; // if specified, the active EXT-X-MAP definition

      var currentMap; // if specified, the active decryption key

      var _key;

      var hasParts = false;

      var noop = function noop() {};

      var defaultMediaGroups = {
        'AUDIO': {},
        'VIDEO': {},
        'CLOSED-CAPTIONS': {},
        'SUBTITLES': {}
      }; // This is the Widevine UUID from DASH IF IOP. The same exact string is
      // used in MPDs with Widevine encrypted streams.

      var widevineUuid = 'urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed'; // group segments into numbered timelines delineated by discontinuities

      var currentTimeline = 0; // the manifest is empty until the parse stream begins delivering data

      _this.manifest = {
        allowCache: true,
        discontinuityStarts: [],
        segments: []
      }; // keep track of the last seen segment's byte range end, as segments are not required
      // to provide the offset, in which case it defaults to the next byte after the
      // previous segment

      var lastByterangeEnd = 0; // keep track of the last seen part's byte range end.

      var lastPartByterangeEnd = 0;

      _this.on('end', function () {
        // only add preloadSegment if we don't yet have a uri for it.
        // and we actually have parts/preloadHints
        if (currentUri.uri || !currentUri.parts && !currentUri.preloadHints) {
          return;
        }

        if (!currentUri.map && currentMap) {
          currentUri.map = currentMap;
        }

        if (!currentUri.key && _key) {
          currentUri.key = _key;
        }

        if (!currentUri.timeline && typeof currentTimeline === 'number') {
          currentUri.timeline = currentTimeline;
        }

        _this.manifest.preloadSegment = currentUri;
      }); // update the manifest with the m3u8 entry from the parse stream


      _this.parseStream.on('data', function (entry) {
        var mediaGroup;
        var rendition;
        ({
          tag: function tag() {
            // switch based on the tag type
            (({
              version: function version() {
                if (entry.version) {
                  this.manifest.version = entry.version;
                }
              },
              'allow-cache': function allowCache() {
                this.manifest.allowCache = entry.allowed;

                if (!('allowed' in entry)) {
                  this.trigger('info', {
                    message: 'defaulting allowCache to YES'
                  });
                  this.manifest.allowCache = true;
                }
              },
              byterange: function byterange() {
                var byterange = {};

                if ('length' in entry) {
                  currentUri.byterange = byterange;
                  byterange.length = entry.length;

                  if (!('offset' in entry)) {
                    /*
                     * From the latest spec (as of this writing):
                     * https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.2.2
                     *
                     * Same text since EXT-X-BYTERANGE's introduction in draft 7:
                     * https://tools.ietf.org/html/draft-pantos-http-live-streaming-07#section-3.3.1)
                     *
                     * "If o [offset] is not present, the sub-range begins at the next byte
                     * following the sub-range of the previous media segment."
                     */
                    entry.offset = lastByterangeEnd;
                  }
                }

                if ('offset' in entry) {
                  currentUri.byterange = byterange;
                  byterange.offset = entry.offset;
                }

                lastByterangeEnd = byterange.offset + byterange.length;
              },
              endlist: function endlist() {
                this.manifest.endList = true;
              },
              inf: function inf() {
                if (!('mediaSequence' in this.manifest)) {
                  this.manifest.mediaSequence = 0;
                  this.trigger('info', {
                    message: 'defaulting media sequence to zero'
                  });
                }

                if (!('discontinuitySequence' in this.manifest)) {
                  this.manifest.discontinuitySequence = 0;
                  this.trigger('info', {
                    message: 'defaulting discontinuity sequence to zero'
                  });
                }

                if (entry.duration > 0) {
                  currentUri.duration = entry.duration;
                }

                if (entry.duration === 0) {
                  currentUri.duration = 0.01;
                  this.trigger('info', {
                    message: 'updating zero segment duration to a small value'
                  });
                }

                this.manifest.segments = uris;
              },
              key: function key() {
                if (!entry.attributes) {
                  this.trigger('warn', {
                    message: 'ignoring key declaration without attribute list'
                  });
                  return;
                } // clear the active encryption key


                if (entry.attributes.METHOD === 'NONE') {
                  _key = null;
                  return;
                }

                if (!entry.attributes.URI) {
                  this.trigger('warn', {
                    message: 'ignoring key declaration without URI'
                  });
                  return;
                } // check if the content is encrypted for Widevine
                // Widevine/HLS spec: https://storage.googleapis.com/wvdocs/Widevine_DRM_HLS.pdf


                if (entry.attributes.KEYFORMAT === widevineUuid) {
                  var VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR', 'SAMPLE-AES-CENC'];

                  if (VALID_METHODS.indexOf(entry.attributes.METHOD) === -1) {
                    this.trigger('warn', {
                      message: 'invalid key method provided for Widevine'
                    });
                    return;
                  }

                  if (entry.attributes.METHOD === 'SAMPLE-AES-CENC') {
                    this.trigger('warn', {
                      message: 'SAMPLE-AES-CENC is deprecated, please use SAMPLE-AES-CTR instead'
                    });
                  }

                  if (entry.attributes.URI.substring(0, 23) !== 'data:text/plain;base64,') {
                    this.trigger('warn', {
                      message: 'invalid key URI provided for Widevine'
                    });
                    return;
                  }

                  if (!(entry.attributes.KEYID && entry.attributes.KEYID.substring(0, 2) === '0x')) {
                    this.trigger('warn', {
                      message: 'invalid key ID provided for Widevine'
                    });
                    return;
                  } // if Widevine key attributes are valid, store them as `contentProtection`
                  // on the manifest to emulate Widevine tag structure in a DASH mpd


                  this.manifest.contentProtection = {
                    'com.widevine.alpha': {
                      attributes: {
                        schemeIdUri: entry.attributes.KEYFORMAT,
                        // remove '0x' from the key id string
                        keyId: entry.attributes.KEYID.substring(2)
                      },
                      // decode the base64-encoded PSSH box
                      pssh: decodeB64ToUint8Array(entry.attributes.URI.split(',')[1])
                    }
                  };
                  return;
                }

                if (!entry.attributes.METHOD) {
                  this.trigger('warn', {
                    message: 'defaulting key method to AES-128'
                  });
                } // setup an encryption key for upcoming segments


                _key = {
                  method: entry.attributes.METHOD || 'AES-128',
                  uri: entry.attributes.URI
                };

                if (typeof entry.attributes.IV !== 'undefined') {
                  _key.iv = entry.attributes.IV;
                }
              },
              'media-sequence': function mediaSequence() {
                if (!isFinite(entry.number)) {
                  this.trigger('warn', {
                    message: 'ignoring invalid media sequence: ' + entry.number
                  });
                  return;
                }

                this.manifest.mediaSequence = entry.number;
              },
              'discontinuity-sequence': function discontinuitySequence() {
                if (!isFinite(entry.number)) {
                  this.trigger('warn', {
                    message: 'ignoring invalid discontinuity sequence: ' + entry.number
                  });
                  return;
                }

                this.manifest.discontinuitySequence = entry.number;
                currentTimeline = entry.number;
              },
              'playlist-type': function playlistType() {
                if (!/VOD|EVENT/.test(entry.playlistType)) {
                  this.trigger('warn', {
                    message: 'ignoring unknown playlist type: ' + entry.playlistType
                  });
                  return;
                }

                this.manifest.playlistType = entry.playlistType;
              },
              map: function map() {
                currentMap = {};

                if (entry.uri) {
                  currentMap.uri = entry.uri;
                }

                if (entry.byterange) {
                  currentMap.byterange = entry.byterange;
                }
              },
              'stream-inf': function streamInf() {
                this.manifest.playlists = uris;
                this.manifest.mediaGroups = this.manifest.mediaGroups || defaultMediaGroups;

                if (!entry.attributes) {
                  this.trigger('warn', {
                    message: 'ignoring empty stream-inf attributes'
                  });
                  return;
                }

                if (!currentUri.attributes) {
                  currentUri.attributes = {};
                }

                _extends_1(currentUri.attributes, entry.attributes);
              },
              media: function media() {
                this.manifest.mediaGroups = this.manifest.mediaGroups || defaultMediaGroups;

                if (!(entry.attributes && entry.attributes.TYPE && entry.attributes['GROUP-ID'] && entry.attributes.NAME)) {
                  this.trigger('warn', {
                    message: 'ignoring incomplete or missing media group'
                  });
                  return;
                } // find the media group, creating defaults as necessary


                var mediaGroupType = this.manifest.mediaGroups[entry.attributes.TYPE];
                mediaGroupType[entry.attributes['GROUP-ID']] = mediaGroupType[entry.attributes['GROUP-ID']] || {};
                mediaGroup = mediaGroupType[entry.attributes['GROUP-ID']]; // collect the rendition metadata

                rendition = {
                  default: /yes/i.test(entry.attributes.DEFAULT)
                };

                if (rendition.default) {
                  rendition.autoselect = true;
                } else {
                  rendition.autoselect = /yes/i.test(entry.attributes.AUTOSELECT);
                }

                if (entry.attributes.LANGUAGE) {
                  rendition.language = entry.attributes.LANGUAGE;
                }

                if (entry.attributes.URI) {
                  rendition.uri = entry.attributes.URI;
                }

                if (entry.attributes['INSTREAM-ID']) {
                  rendition.instreamId = entry.attributes['INSTREAM-ID'];
                }

                if (entry.attributes.CHARACTERISTICS) {
                  rendition.characteristics = entry.attributes.CHARACTERISTICS;
                }

                if (entry.attributes.FORCED) {
                  rendition.forced = /yes/i.test(entry.attributes.FORCED);
                } // insert the new rendition


                mediaGroup[entry.attributes.NAME] = rendition;
              },
              discontinuity: function discontinuity() {
                currentTimeline += 1;
                currentUri.discontinuity = true;
                this.manifest.discontinuityStarts.push(uris.length);
              },
              'program-date-time': function programDateTime() {
                if (typeof this.manifest.dateTimeString === 'undefined') {
                  // PROGRAM-DATE-TIME is a media-segment tag, but for backwards
                  // compatibility, we add the first occurrence of the PROGRAM-DATE-TIME tag
                  // to the manifest object
                  // TODO: Consider removing this in future major version
                  this.manifest.dateTimeString = entry.dateTimeString;
                  this.manifest.dateTimeObject = entry.dateTimeObject;
                }

                currentUri.dateTimeString = entry.dateTimeString;
                currentUri.dateTimeObject = entry.dateTimeObject;
              },
              targetduration: function targetduration() {
                if (!isFinite(entry.duration) || entry.duration < 0) {
                  this.trigger('warn', {
                    message: 'ignoring invalid target duration: ' + entry.duration
                  });
                  return;
                }

                this.manifest.targetDuration = entry.duration;
                setHoldBack.call(this, this.manifest);
              },
              start: function start() {
                if (!entry.attributes || isNaN(entry.attributes['TIME-OFFSET'])) {
                  this.trigger('warn', {
                    message: 'ignoring start declaration without appropriate attribute list'
                  });
                  return;
                }

                this.manifest.start = {
                  timeOffset: entry.attributes['TIME-OFFSET'],
                  precise: entry.attributes.PRECISE
                };
              },
              'cue-out': function cueOut() {
                currentUri.cueOut = entry.data;
              },
              'cue-out-cont': function cueOutCont() {
                currentUri.cueOutCont = entry.data;
              },
              'cue-in': function cueIn() {
                currentUri.cueIn = entry.data;
              },
              'skip': function skip() {
                this.manifest.skip = camelCaseKeys(entry.attributes);
                this.warnOnMissingAttributes_('#EXT-X-SKIP', entry.attributes, ['SKIPPED-SEGMENTS']);
              },
              'part': function part() {
                var _this2 = this;

                hasParts = true; // parts are always specified before a segment

                var segmentIndex = this.manifest.segments.length;
                var part = camelCaseKeys(entry.attributes);
                currentUri.parts = currentUri.parts || [];
                currentUri.parts.push(part);

                if (part.byterange) {
                  if (!part.byterange.hasOwnProperty('offset')) {
                    part.byterange.offset = lastPartByterangeEnd;
                  }

                  lastPartByterangeEnd = part.byterange.offset + part.byterange.length;
                }

                var partIndex = currentUri.parts.length - 1;
                this.warnOnMissingAttributes_("#EXT-X-PART #" + partIndex + " for segment #" + segmentIndex, entry.attributes, ['URI', 'DURATION']);

                if (this.manifest.renditionReports) {
                  this.manifest.renditionReports.forEach(function (r, i) {
                    if (!r.hasOwnProperty('lastPart')) {
                      _this2.trigger('warn', {
                        message: "#EXT-X-RENDITION-REPORT #" + i + " lacks required attribute(s): LAST-PART"
                      });
                    }
                  });
                }
              },
              'server-control': function serverControl() {
                var attrs = this.manifest.serverControl = camelCaseKeys(entry.attributes);

                if (!attrs.hasOwnProperty('canBlockReload')) {
                  attrs.canBlockReload = false;
                  this.trigger('info', {
                    message: '#EXT-X-SERVER-CONTROL defaulting CAN-BLOCK-RELOAD to false'
                  });
                }

                setHoldBack.call(this, this.manifest);

                if (attrs.canSkipDateranges && !attrs.hasOwnProperty('canSkipUntil')) {
                  this.trigger('warn', {
                    message: '#EXT-X-SERVER-CONTROL lacks required attribute CAN-SKIP-UNTIL which is required when CAN-SKIP-DATERANGES is set'
                  });
                }
              },
              'preload-hint': function preloadHint() {
                // parts are always specified before a segment
                var segmentIndex = this.manifest.segments.length;
                var hint = camelCaseKeys(entry.attributes);
                var isPart = hint.type && hint.type === 'PART';
                currentUri.preloadHints = currentUri.preloadHints || [];
                currentUri.preloadHints.push(hint);

                if (hint.byterange) {
                  if (!hint.byterange.hasOwnProperty('offset')) {
                    // use last part byterange end or zero if not a part.
                    hint.byterange.offset = isPart ? lastPartByterangeEnd : 0;

                    if (isPart) {
                      lastPartByterangeEnd = hint.byterange.offset + hint.byterange.length;
                    }
                  }
                }

                var index = currentUri.preloadHints.length - 1;
                this.warnOnMissingAttributes_("#EXT-X-PRELOAD-HINT #" + index + " for segment #" + segmentIndex, entry.attributes, ['TYPE', 'URI']);

                if (!hint.type) {
                  return;
                } // search through all preload hints except for the current one for
                // a duplicate type.


                for (var i = 0; i < currentUri.preloadHints.length - 1; i++) {
                  var otherHint = currentUri.preloadHints[i];

                  if (!otherHint.type) {
                    continue;
                  }

                  if (otherHint.type === hint.type) {
                    this.trigger('warn', {
                      message: "#EXT-X-PRELOAD-HINT #" + index + " for segment #" + segmentIndex + " has the same TYPE " + hint.type + " as preload hint #" + i
                    });
                  }
                }
              },
              'rendition-report': function renditionReport() {
                var report = camelCaseKeys(entry.attributes);
                this.manifest.renditionReports = this.manifest.renditionReports || [];
                this.manifest.renditionReports.push(report);
                var index = this.manifest.renditionReports.length - 1;
                var required = ['LAST-MSN', 'URI'];

                if (hasParts) {
                  required.push('LAST-PART');
                }

                this.warnOnMissingAttributes_("#EXT-X-RENDITION-REPORT #" + index, entry.attributes, required);
              },
              'part-inf': function partInf() {
                this.manifest.partInf = camelCaseKeys(entry.attributes);
                this.warnOnMissingAttributes_('#EXT-X-PART-INF', entry.attributes, ['PART-TARGET']);

                if (this.manifest.partInf.partTarget) {
                  this.manifest.partTargetDuration = this.manifest.partInf.partTarget;
                }

                setHoldBack.call(this, this.manifest);
              }
            })[entry.tagType] || noop).call(self);
          },
          uri: function uri() {
            currentUri.uri = entry.uri;
            uris.push(currentUri); // if no explicit duration was declared, use the target duration

            if (this.manifest.targetDuration && !('duration' in currentUri)) {
              this.trigger('warn', {
                message: 'defaulting segment duration to the target duration'
              });
              currentUri.duration = this.manifest.targetDuration;
            } // annotate with encryption information, if necessary


            if (_key) {
              currentUri.key = _key;
            }

            currentUri.timeline = currentTimeline; // annotate with initialization segment information, if necessary

            if (currentMap) {
              currentUri.map = currentMap;
            } // reset the last byterange end as it needs to be 0 between parts


            lastPartByterangeEnd = 0; // prepare for the next URI

            currentUri = {};
          },
          comment: function comment() {// comments are not important for playback
          },
          custom: function custom() {
            // if this is segment-level data attach the output to the segment
            if (entry.segment) {
              currentUri.custom = currentUri.custom || {};
              currentUri.custom[entry.customType] = entry.data; // if this is manifest-level data attach to the top level manifest object
            } else {
              this.manifest.custom = this.manifest.custom || {};
              this.manifest.custom[entry.customType] = entry.data;
            }
          }
        })[entry.type].call(self);
      });

      return _this;
    }

    var _proto = Parser.prototype;

    _proto.warnOnMissingAttributes_ = function warnOnMissingAttributes_(identifier, attributes, required) {
      var missing = [];
      required.forEach(function (key) {
        if (!attributes.hasOwnProperty(key)) {
          missing.push(key);
        }
      });

      if (missing.length) {
        this.trigger('warn', {
          message: identifier + " lacks required attribute(s): " + missing.join(', ')
        });
      }
    }
    /**
     * Parse the input string and update the manifest object.
     *
     * @param {string} chunk a potentially incomplete portion of the manifest
     */
    ;

    _proto.push = function push(chunk) {
      this.lineStream.push(chunk);
    }
    /**
     * Flush any remaining input. This can be handy if the last line of an M3U8
     * manifest did not contain a trailing newline but the file has been
     * completely received.
     */
    ;

    _proto.end = function end() {
      // flush any buffered input
      this.lineStream.push('\n');
      this.trigger('end');
    }
    /**
     * Add an additional parser for non-standard tags
     *
     * @param {Object} options a map of options for the added parser
     * @param {RegExp} options.expression a regular expression to match the custom header
     * @param {string} options.type the type to register to the output
     * @param {Function} [options.dataParser] function to parse the line into an object
     * @param {boolean} [options.segment] should tag data be attached to the segment object
     */
    ;

    _proto.addParser = function addParser(options) {
      this.parseStream.addParser(options);
    }
    /**
     * Add a custom header mapper
     *
     * @param {Object} options
     * @param {RegExp} options.expression a regular expression to match the custom header
     * @param {Function} options.map function to translate tag into a different tag
     */
    ;

    _proto.addTagMapper = function addTagMapper(options) {
      this.parseStream.addTagMapper(options);
    };

    return Parser;
  }(Stream$1);
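
  // Illustrative usage (hypothetical manifest text): the canonical
  // create/push/end flow from the class docs above.
  //
  //   var parser = new Parser();
  //   parser.push('#EXTM3U\n#EXT-X-TARGETDURATION:6\n#EXTINF:6,\nlow/seg-0.ts\n');
  //   parser.end(); // flush and signal end-of-input
  //   parser.manifest.targetDuration;  // => 6
  //   parser.manifest.segments[0].uri; // => 'low/seg-0.ts'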
1954
1955 var log = videojs__default['default'].log;
1956 var createPlaylistID = function createPlaylistID(index, uri) {
1957 return index + "-" + uri;
1958 };
1959 /**
1960 * Parses a given m3u8 playlist
1961 *
1962 * @param {Function} [onwarn]
1963 * a function to call when the parser triggers a warning event.
1964 * @param {Function} [oninfo]
1965 * a function to call when the parser triggers an info event.
1966 * @param {string} manifestString
1967 * The downloaded manifest string
1968 * @param {Object[]} [customTagParsers]
1969 * An array of custom tag parsers for the m3u8-parser instance
1970 * @param {Object[]} [customTagMappers]
1971 * An array of custom tag mappers for the m3u8-parser instance
1972 * @param {boolean} [experimentalLLHLS=false]
1973 * Whether to keep ll-hls features in the manifest after parsing.
1974 * @return {Object}
1975 * The manifest object
1976 */
1977
1978 var parseManifest = function parseManifest(_ref) {
1979 var onwarn = _ref.onwarn,
1980 oninfo = _ref.oninfo,
1981 manifestString = _ref.manifestString,
1982 _ref$customTagParsers = _ref.customTagParsers,
1983 customTagParsers = _ref$customTagParsers === void 0 ? [] : _ref$customTagParsers,
1984 _ref$customTagMappers = _ref.customTagMappers,
1985 customTagMappers = _ref$customTagMappers === void 0 ? [] : _ref$customTagMappers,
1986 experimentalLLHLS = _ref.experimentalLLHLS;
1987 var parser = new Parser();
1988
1989 if (onwarn) {
1990 parser.on('warn', onwarn);
1991 }
1992
1993 if (oninfo) {
1994 parser.on('info', oninfo);
1995 }
1996
1997 customTagParsers.forEach(function (customParser) {
1998 return parser.addParser(customParser);
1999 });
2000 customTagMappers.forEach(function (mapper) {
2001 return parser.addTagMapper(mapper);
2002 });
2003 parser.push(manifestString);
2004 parser.end();
2005 var manifest = parser.manifest; // remove llhls features from the parsed manifest
2006 // if we don't want llhls support.
2007
2008 if (!experimentalLLHLS) {
2009 ['preloadSegment', 'skip', 'serverControl', 'renditionReports', 'partInf', 'partTargetDuration'].forEach(function (k) {
2010 if (manifest.hasOwnProperty(k)) {
2011 delete manifest[k];
2012 }
2013 });
2014
2015 if (manifest.segments) {
2016 manifest.segments.forEach(function (segment) {
2017 ['parts', 'preloadHints'].forEach(function (k) {
2018 if (segment.hasOwnProperty(k)) {
2019 delete segment[k];
2020 }
2021 });
2022 });
2023 }
2024 }
2025
2026 if (!manifest.targetDuration) {
2027 var targetDuration = 10;
2028
2029 if (manifest.segments && manifest.segments.length) {
2030 targetDuration = manifest.segments.reduce(function (acc, s) {
2031 return Math.max(acc, s.duration);
2032 }, 0);
2033 }
2034
2035 if (onwarn) {
2036 onwarn("manifest has no targetDuration, defaulting to " + targetDuration);
2037 }
2038
2039 manifest.targetDuration = targetDuration;
2040 }
2041
2042 return manifest;
2043 };
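// Illustrative sketch (not part of this bundle): parseManifest applied to a
// small, hypothetical media playlist string.
//
//   var manifest = parseManifest({
//     manifestString: '#EXTM3U\n#EXT-X-TARGETDURATION:6\n' +
//       '#EXTINF:6,\nseg-0.ts\n#EXTINF:6,\nseg-1.ts\n#EXT-X-ENDLIST\n',
//     onwarn: function (warning) { console.warn(warning); }
//   });
//   // manifest.segments.length === 2, manifest.targetDuration === 6,
//   // manifest.endList === true; LL-HLS properties are stripped because
//   // experimentalLLHLS was not set.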
2044 /**
2045 * Loops through all supported media groups in master and calls the provided
2046 * callback for each group
2047 *
2048 * @param {Object} master
2049 * The parsed master manifest object
2050 * @param {Function} callback
2051 * Callback to call for each media group
2052 */
2053
2054 var forEachMediaGroup = function forEachMediaGroup(master, callback) {
2055 ['AUDIO', 'SUBTITLES'].forEach(function (mediaType) {
2056 for (var groupKey in master.mediaGroups[mediaType]) {
2057 for (var labelKey in master.mediaGroups[mediaType][groupKey]) {
2058 var mediaProperties = master.mediaGroups[mediaType][groupKey][labelKey];
2059 callback(mediaProperties, mediaType, groupKey, labelKey);
2060 }
2061 }
2062 });
2063 };
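// Illustrative sketch (not part of this bundle): enumerating every AUDIO and
// SUBTITLES rendition of a hypothetical parsed master playlist.
//
//   forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
//     // e.g. 'AUDIO', 'audio-hi', 'English', with properties.uri et al.
//     console.log(mediaType + '/' + groupKey + '/' + labelKey, properties.uri);
//   });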
2064 /**
2065 * Adds properties and attributes to the playlist to keep consistent functionality for
2066 * playlists throughout VHS.
2067 *
2068 * @param {Object} config
2069 * Arguments object
2070 * @param {Object} config.playlist
2071 * The media playlist
2072 * @param {string} [config.uri]
2073 * The uri to the media playlist (if media playlist is not from within a master
2074 * playlist)
2075 * @param {string} config.id
2076 * ID to use for the playlist
2077 */
2078
2079 var setupMediaPlaylist = function setupMediaPlaylist(_ref2) {
2080 var playlist = _ref2.playlist,
2081 uri = _ref2.uri,
2082 id = _ref2.id;
2083 playlist.id = id;
2084
2085 if (uri) {
2086 // For media playlists, m3u8-parser does not have access to a URI, as HLS media
2087 // playlists do not contain their own source URI, but one is needed for consistency in
2088 // VHS.
2089 playlist.uri = uri;
2090 } // For HLS master playlists, even though certain attributes MUST be defined, the
2091 // stream may still be played without them.
2092 // For HLS media playlists, m3u8-parser does not attach an attributes object to the
2093 // manifest.
2094 //
2095 // To avoid undefined reference errors throughout the project, and make the code easier
2096 // to write/read, add an empty attributes object for these cases.
2097
2098
2099 playlist.attributes = playlist.attributes || {};
2100 };
2101 /**
2102 * Adds ID, resolvedUri, and attributes properties to each playlist of the master, where
2103 * necessary. In addition, creates playlist IDs for each playlist and adds playlist ID to
2104 * playlist references to the playlists array.
2105 *
2106 * @param {Object} master
2107 * The master playlist
2108 */
2109
2110 var setupMediaPlaylists = function setupMediaPlaylists(master) {
2111 var i = master.playlists.length;
2112
2113 while (i--) {
2114 var playlist = master.playlists[i];
2115 setupMediaPlaylist({
2116 playlist: playlist,
2117 id: createPlaylistID(i, playlist.uri)
2118 });
2119 playlist.resolvedUri = resolveUrl(master.uri, playlist.uri);
2120 master.playlists[playlist.id] = playlist; // URI reference added for backwards compatibility
2121
2122 master.playlists[playlist.uri] = playlist; // Although the spec states an #EXT-X-STREAM-INF tag MUST have a BANDWIDTH attribute,
2123 // the stream can be played without it. Although an attributes property may have been
2124 // added to the playlist to prevent undefined references, issue a warning to fix the
2125 // manifest.
2126
2127 if (!playlist.attributes.BANDWIDTH) {
2128 log.warn('Invalid playlist STREAM-INF detected. Missing BANDWIDTH attribute.');
2129 }
2130 }
2131 };
2132 /**
2133 * Adds resolvedUri properties to each media group.
2134 *
2135 * @param {Object} master
2136 * The master playlist
2137 */
2138
2139 var resolveMediaGroupUris = function resolveMediaGroupUris(master) {
2140 forEachMediaGroup(master, function (properties) {
2141 if (properties.uri) {
2142 properties.resolvedUri = resolveUrl(master.uri, properties.uri);
2143 }
2144 });
2145 };
2146 /**
2147 * Creates a master playlist wrapper to insert a sole media playlist into.
2148 *
2149 * @param {Object} media
2150 * Media playlist
2151 * @param {string} uri
2152 * The media URI
2153 *
2154 * @return {Object}
2155 * Master playlist
2156 */
2157
2158 var masterForMedia = function masterForMedia(media, uri) {
2159 var id = createPlaylistID(0, uri);
2160 var master = {
2161 mediaGroups: {
2162 'AUDIO': {},
2163 'VIDEO': {},
2164 'CLOSED-CAPTIONS': {},
2165 'SUBTITLES': {}
2166 },
2167 uri: window__default['default'].location.href,
2168 resolvedUri: window__default['default'].location.href,
2169 playlists: [{
2170 uri: uri,
2171 id: id,
2172 resolvedUri: uri,
2173 // m3u8-parser does not attach an attributes property to media playlists so make
2174 // sure that the property is attached to avoid undefined reference errors
2175 attributes: {}
2176 }]
2177 }; // set up ID reference
2178
2179 master.playlists[id] = master.playlists[0]; // URI reference added for backwards compatibility
2180
2181 master.playlists[uri] = master.playlists[0];
2182 return master;
2183 };
2184 /**
2185 * Does an in-place update of the master manifest to add updated playlist URI references
2186 * as well as other properties needed by VHS that aren't included by the parser.
2187 *
2188 * @param {Object} master
2189 * Master manifest object
2190 * @param {string} uri
2191 * The source URI
2192 */
2193
2194 var addPropertiesToMaster = function addPropertiesToMaster(master, uri) {
2195 master.uri = uri;
2196
2197 for (var i = 0; i < master.playlists.length; i++) {
2198 if (!master.playlists[i].uri) {
2199 // Set up phony URIs for the playlists since playlists are referenced by their URIs
2200 // throughout VHS, but some formats (e.g., DASH) don't have external URIs
2201 // TODO: consider adding dummy URIs in mpd-parser
2202 var phonyUri = "placeholder-uri-" + i;
2203 master.playlists[i].uri = phonyUri;
2204 }
2205 }
2206
2207 forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
2208 if (!properties.playlists || !properties.playlists.length || properties.playlists[0].uri) {
2209 return;
2210 } // Set up phony URIs for the media group playlists since playlists are referenced by
2211 // their URIs throughout VHS, but some formats (e.g., DASH) don't have external URIs
2212
2213
2214 var phonyUri = "placeholder-uri-" + mediaType + "-" + groupKey + "-" + labelKey;
2215 var id = createPlaylistID(0, phonyUri);
2216 properties.playlists[0].uri = phonyUri;
2217 properties.playlists[0].id = id; // setup ID and URI references (URI for backwards compatibility)
2218
2219 master.playlists[id] = properties.playlists[0];
2220 master.playlists[phonyUri] = properties.playlists[0];
2221 });
2222 setupMediaPlaylists(master);
2223 resolveMediaGroupUris(master);
2224 };
2225
2226 var mergeOptions$2 = videojs__default['default'].mergeOptions,
2227 EventTarget$1 = videojs__default['default'].EventTarget;
2228 /**
2229 * Returns a new segment object with properties and
2230 * the parts array merged.
2231 *
2232 * @param {Object} a the old segment
2233 * @param {Object} b the new segment
2234 *
2235 * @return {Object} the merged segment
2236 */
2237
2238 var updateSegment = function updateSegment(a, b) {
2239 if (!a) {
2240 return b;
2241 }
2242
2243 var result = mergeOptions$2(a, b); // if only the old segment has parts
2244 // then the parts are no longer valid
2245
2246 if (a.parts && !b.parts) {
2247 delete result.parts; // if both segments have parts
2248 // copy part properties from the old segment
2249 // to the new one.
2250 } else if (a.parts && b.parts) {
2251 for (var i = 0; i < b.parts.length; i++) {
2252 if (a.parts && a.parts[i]) {
2253 result.parts[i] = mergeOptions$2(a.parts[i], b.parts[i]);
2254 }
2255 }
2256 }
2257
2258 return result;
2259 };
2260 /**
2261 * Returns a new array of segments that is the result of merging
2262 * properties from an older list of segments onto an updated
2263 * list. No properties on the updated playlist will be overwritten.
2264 *
2265 * @param {Array} original the outdated list of segments
2266 * @param {Array} update the updated list of segments
2267 * @param {number=} offset the index of the first update
2268 * segment in the original segment list. For non-live playlists,
2269 * this should always be zero and does not need to be
2270 * specified. For live playlists, it should be the difference
2271 * between the media sequence numbers in the original and updated
2272 * playlists.
2273 * @return {Array} a list of merged segment objects
2274 */
2275
2276 var updateSegments = function updateSegments(original, update, offset) {
2277 var oldSegments = original.slice();
2278 var result = update.slice();
2279 offset = offset || 0;
2280 var length = Math.min(original.length, update.length + offset);
2281
2282 for (var i = offset; i < length; i++) {
2283 var newIndex = i - offset;
2284 result[newIndex] = updateSegment(oldSegments[i], result[newIndex]);
2285 }
2286
2287 return result;
2288 };
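// Illustrative sketch (not part of this bundle): merging a live refresh whose
// media sequence advanced by one, so old segment 1 lines up with new segment 0
// and its timing info is carried over.
//
//   var original = [{ uri: 's1.ts', duration: 6 }, { uri: 's2.ts', duration: 6, start: 6 }];
//   var update = [{ uri: 's2.ts', duration: 6 }, { uri: 's3.ts', duration: 6 }];
//   var merged = updateSegments(original, update, 1);
//   // merged[0].start === 6 (copied from the old 's2.ts' entry)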
2289 var resolveSegmentUris = function resolveSegmentUris(segment, baseUri) {
2290 // preloadSegment will not have a uri at all
2291 // as the segment isn't actually in the manifest yet, only parts
2292 if (!segment.resolvedUri && segment.uri) {
2293 segment.resolvedUri = resolveUrl(baseUri, segment.uri);
2294 }
2295
2296 if (segment.key && !segment.key.resolvedUri) {
2297 segment.key.resolvedUri = resolveUrl(baseUri, segment.key.uri);
2298 }
2299
2300 if (segment.map && !segment.map.resolvedUri) {
2301 segment.map.resolvedUri = resolveUrl(baseUri, segment.map.uri);
2302 }
2303
2304 if (segment.parts && segment.parts.length) {
2305 segment.parts.forEach(function (p) {
2306 if (p.resolvedUri) {
2307 return;
2308 }
2309
2310 p.resolvedUri = resolveUrl(baseUri, p.uri);
2311 });
2312 }
2313
2314 if (segment.preloadHints && segment.preloadHints.length) {
2315 segment.preloadHints.forEach(function (p) {
2316 if (p.resolvedUri) {
2317 return;
2318 }
2319
2320 p.resolvedUri = resolveUrl(baseUri, p.uri);
2321 });
2322 }
2323 };
2324
2325 var getAllSegments = function getAllSegments(media) {
2326 var segments = media.segments || []; // a preloadSegment with only preloadHints is not currently
2327 // a usable segment, only include a preloadSegment that has
2328 // parts.
2329
2330 if (media.preloadSegment && media.preloadSegment.parts) {
2331 segments.push(media.preloadSegment);
2332 }
2333
2334 return segments;
2335 }; // consider the playlist unchanged if the playlist object is the same, or if
2336 // the number of segments is equal, the media sequence number is unchanged,
2337 // and the endList flag has not changed
2338
2339
2340 var isPlaylistUnchanged = function isPlaylistUnchanged(a, b) {
2341 return a === b || a.segments && b.segments && a.segments.length === b.segments.length && a.endList === b.endList && a.mediaSequence === b.mediaSequence;
2342 };
2343 /**
2344 * Returns a new master playlist that is the result of merging an
2345 * updated media playlist into the original version. If the
2346 * updated media playlist does not match any of the playlist
2347 * entries in the original master playlist, null is returned.
2348 *
2349 * @param {Object} master a parsed master M3U8 object
2350 * @param {Object} media a parsed media M3U8 object
2351 * @return {Object} a new object that represents the original
2352 * master playlist with the updated media playlist merged in, or
2353 * null if the merge produced no change.
2354 */
2355
2356 var updateMaster$1 = function updateMaster(master, media, unchangedCheck) {
2357 if (unchangedCheck === void 0) {
2358 unchangedCheck = isPlaylistUnchanged;
2359 }
2360
2361 var result = mergeOptions$2(master, {});
2362 var playlist = result.playlists[media.id];
2363
2364 if (!playlist) {
2365 return null;
2366 }
2367
2368 if (unchangedCheck(playlist, media)) {
2369 return null;
2370 }
2371
2372 var mergedPlaylist = mergeOptions$2(playlist, media);
2373 media.segments = getAllSegments(media); // if the update could overlap existing segment information, merge the two segment lists
2374
2375 if (playlist.segments) {
2376 mergedPlaylist.segments = updateSegments(playlist.segments, media.segments, media.mediaSequence - playlist.mediaSequence);
2377 } // resolve any segment URIs to prevent us from having to do it later
2378
2379
2380 mergedPlaylist.segments.forEach(function (segment) {
2381 resolveSegmentUris(segment, mergedPlaylist.resolvedUri);
2382 }); // TODO Right now in the playlists array there are two references to each playlist, one
2383 // that is referenced by index, and one by URI. The index reference may no longer be
2384 // necessary.
2385
2386 for (var i = 0; i < result.playlists.length; i++) {
2387 if (result.playlists[i].id === media.id) {
2388 result.playlists[i] = mergedPlaylist;
2389 }
2390 }
2391
2392 result.playlists[media.id] = mergedPlaylist; // URI reference added for backwards compatibility
2393
2394 result.playlists[media.uri] = mergedPlaylist;
2395 return result;
2396 };
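// Illustrative sketch (not part of this bundle): applying a media playlist
// refresh, where `master` and `media` are hypothetical parsed objects and
// media.id matches an entry in master.playlists.
//
//   var updated = updateMaster$1(master, media);
//   if (updated === null) {
//     // no-op refresh: unknown playlist id, or contents deemed unchanged
//   } else {
//     // `updated` is a new master whose segment lists were merged and whose
//     // segment/key/map URIs were resolved against the playlist resolvedUri
//   }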
2397 /**
2398 * Calculates the time to wait before refreshing a live playlist
2399 *
2400 * @param {Object} media
2401 * The current media
2402 * @param {boolean} update
2403 * True if there were any updates from the last refresh, false otherwise
2404 * @return {number}
2405 * The time in ms to wait before refreshing the live playlist
2406 */
2407
2408 var refreshDelay = function refreshDelay(media, update) {
2409 var lastSegment = media.segments[media.segments.length - 1];
2410 var lastPart = lastSegment && lastSegment.parts && lastSegment.parts[lastSegment.parts.length - 1];
2411 var lastDuration = lastPart && lastPart.duration || lastSegment && lastSegment.duration;
2412
2413 if (update && lastDuration) {
2414 return lastDuration * 1000;
2415 } // if the playlist is unchanged since the last reload or last segment duration
2416 // cannot be determined, try again after half the target duration
2417
2418
2419 return (media.partTargetDuration || media.targetDuration || 10) * 500;
2420 };
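// Illustrative sketch (not part of this bundle) of the two refreshDelay cases:
//
//   var media = { targetDuration: 6, segments: [{ duration: 6 }] };
//   refreshDelay(media, true);  // 6000ms: last segment duration, playlist changed
//   refreshDelay(media, false); // 3000ms: half the target duration, unchanged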
2421 /**
2422 * Load a playlist from a remote location
2423 *
2424 * @class PlaylistLoader
2425 * @extends EventTarget
2426 * @param {string|Object} src url of the playlist or a parsed manifest object
2427 * @param {Object} vhs the VHS instance used to issue xhr requests
2428 * @param {Object} [options] loader options such as withCredentials and handleManifestRedirects
2429 */
2430
2431 var PlaylistLoader = /*#__PURE__*/function (_EventTarget) {
2432 inheritsLoose(PlaylistLoader, _EventTarget);
2433
2434 function PlaylistLoader(src, vhs, options) {
2435 var _this;
2436
2437 if (options === void 0) {
2438 options = {};
2439 }
2440
2441 _this = _EventTarget.call(this) || this;
2442
2443 if (!src) {
2444 throw new Error('A non-empty playlist URL or object is required');
2445 }
2446
2447 _this.logger_ = logger('PlaylistLoader');
2448 var _options = options,
2449 _options$withCredenti = _options.withCredentials,
2450 withCredentials = _options$withCredenti === void 0 ? false : _options$withCredenti,
2451 _options$handleManife = _options.handleManifestRedirects,
2452 handleManifestRedirects = _options$handleManife === void 0 ? false : _options$handleManife;
2453 _this.src = src;
2454 _this.vhs_ = vhs;
2455 _this.withCredentials = withCredentials;
2456 _this.handleManifestRedirects = handleManifestRedirects;
2457 var vhsOptions = vhs.options_;
2458 _this.customTagParsers = vhsOptions && vhsOptions.customTagParsers || [];
2459 _this.customTagMappers = vhsOptions && vhsOptions.customTagMappers || [];
2460 _this.experimentalLLHLS = vhsOptions && vhsOptions.experimentalLLHLS || false; // initialize the loader state
2461
2462 _this.state = 'HAVE_NOTHING'; // live playlist staleness timeout
2463
2464 _this.on('mediaupdatetimeout', function () {
2465 if (_this.state !== 'HAVE_METADATA') {
2466 // only refresh the media playlist if no other activity is going on
2467 return;
2468 }
2469
2470 _this.state = 'HAVE_CURRENT_METADATA';
2471 _this.request = _this.vhs_.xhr({
2472 uri: resolveUrl(_this.master.uri, _this.media().uri),
2473 withCredentials: _this.withCredentials
2474 }, function (error, req) {
2475 // disposed
2476 if (!_this.request) {
2477 return;
2478 }
2479
2480 if (error) {
2481 return _this.playlistRequestError(_this.request, _this.media(), 'HAVE_METADATA');
2482 }
2483
2484 _this.haveMetadata({
2485 playlistString: _this.request.responseText,
2486 url: _this.media().uri,
2487 id: _this.media().id
2488 });
2489 });
2490 });
2491
2492 return _this;
2493 }
2494
2495 var _proto = PlaylistLoader.prototype;
2496
2497 _proto.playlistRequestError = function playlistRequestError(xhr, playlist, startingState) {
2498 var uri = playlist.uri,
2499 id = playlist.id; // any in-flight request is now finished
2500
2501 this.request = null;
2502
2503 if (startingState) {
2504 this.state = startingState;
2505 }
2506
2507 this.error = {
2508 playlist: this.master.playlists[id],
2509 status: xhr.status,
2510 message: "HLS playlist request error at URL: " + uri + ".",
2511 responseText: xhr.responseText,
2512 code: xhr.status >= 500 ? 4 : 2
2513 };
2514 this.trigger('error');
2515 }
2516 /**
2517 * Update the playlist loader's state in response to a new or updated playlist.
2518 *
2519 * @param {string} [playlistString]
2520 * Playlist string (if playlistObject is not provided)
2521 * @param {Object} [playlistObject]
2522 * Playlist object (if playlistString is not provided)
2523 * @param {string} url
2524 * URL of playlist
2525 * @param {string} id
2526 * ID to use for playlist
2527 */
2528 ;
2529
2530 _proto.haveMetadata = function haveMetadata(_ref) {
2531 var _this2 = this;
2532
2533 var playlistString = _ref.playlistString,
2534 playlistObject = _ref.playlistObject,
2535 url = _ref.url,
2536 id = _ref.id;
2537 // any in-flight request is now finished
2538 this.request = null;
2539 this.state = 'HAVE_METADATA';
2540 var playlist = playlistObject || parseManifest({
2541 onwarn: function onwarn(_ref2) {
2542 var message = _ref2.message;
2543 return _this2.logger_("m3u8-parser warn for " + id + ": " + message);
2544 },
2545 oninfo: function oninfo(_ref3) {
2546 var message = _ref3.message;
2547 return _this2.logger_("m3u8-parser info for " + id + ": " + message);
2548 },
2549 manifestString: playlistString,
2550 customTagParsers: this.customTagParsers,
2551 customTagMappers: this.customTagMappers,
2552 experimentalLLHLS: this.experimentalLLHLS
2553 });
2554 playlist.lastRequest = Date.now();
2555 setupMediaPlaylist({
2556 playlist: playlist,
2557 uri: url,
2558 id: id
2559 }); // merge this playlist into the master
2560
2561 var update = updateMaster$1(this.master, playlist);
2562 this.targetDuration = playlist.partTargetDuration || playlist.targetDuration;
2563
2564 if (update) {
2565 this.master = update;
2566 this.media_ = this.master.playlists[id];
2567 } else {
2568 this.trigger('playlistunchanged');
2569 } // refresh live playlists after a target duration passes
2570
2571
2572 if (!this.media().endList) {
2573 window__default['default'].clearTimeout(this.mediaUpdateTimeout);
2574 this.mediaUpdateTimeout = window__default['default'].setTimeout(function () {
2575 _this2.trigger('mediaupdatetimeout');
2576 }, refreshDelay(this.media(), !!update));
2577 }
2578
2579 this.trigger('loadedplaylist');
2580 }
2581 /**
2582 * Abort any outstanding work and clean up.
2583 */
2584 ;
2585
2586 _proto.dispose = function dispose() {
2587 this.trigger('dispose');
2588 this.stopRequest();
2589 window__default['default'].clearTimeout(this.mediaUpdateTimeout);
2590 window__default['default'].clearTimeout(this.finalRenditionTimeout);
2591 this.off();
2592 };
2593
2594 _proto.stopRequest = function stopRequest() {
2595 if (this.request) {
2596 var oldRequest = this.request;
2597 this.request = null;
2598 oldRequest.onreadystatechange = null;
2599 oldRequest.abort();
2600 }
2601 }
2602 /**
2603 * When called without any arguments, returns the currently
2604 * active media playlist. When called with a single argument,
2605 * triggers the playlist loader to asynchronously switch to the
2606 * specified media playlist. Calling this method while the
2607 * loader is in the HAVE_NOTHING causes an error to be emitted
2608 * but otherwise has no effect.
2609 *
2610 * @param {Object=} playlist the parsed media playlist
2611 * object to switch to
2612 * @param {boolean=} shouldDelay whether we should delay the request by half target duration
2613 *
2614 * @return {Playlist} the current loaded media
2615 */
2616 ;
2617
2618 _proto.media = function media(playlist, shouldDelay) {
2619 var _this3 = this;
2620
2621 // getter
2622 if (!playlist) {
2623 return this.media_;
2624 } // setter
2625
2626
2627 if (this.state === 'HAVE_NOTHING') {
2628 throw new Error('Cannot switch media playlist from ' + this.state);
2629 } // find the playlist object if the target playlist has been
2630 // specified by URI
2631
2632
2633 if (typeof playlist === 'string') {
2634 if (!this.master.playlists[playlist]) {
2635 throw new Error('Unknown playlist URI: ' + playlist);
2636 }
2637
2638 playlist = this.master.playlists[playlist];
2639 }
2640
2641 window__default['default'].clearTimeout(this.finalRenditionTimeout);
2642
2643 if (shouldDelay) {
2644 var delay = (playlist.partTargetDuration || playlist.targetDuration) / 2 * 1000 || 5 * 1000;
2645 this.finalRenditionTimeout = window__default['default'].setTimeout(this.media.bind(this, playlist, false), delay);
2646 return;
2647 }
2648
2649 var startingState = this.state;
2650 var mediaChange = !this.media_ || playlist.id !== this.media_.id; // switch to fully loaded playlists immediately
2651
2652 if (this.master.playlists[playlist.id].endList || // handle the case of a playlist object (e.g., if using vhs-json with a resolved
2653 // media playlist or, for the case of demuxed audio, a resolved audio media group)
2654 playlist.endList && playlist.segments.length) {
2655 // abort outstanding playlist requests
2656 if (this.request) {
2657 this.request.onreadystatechange = null;
2658 this.request.abort();
2659 this.request = null;
2660 }
2661
2662 this.state = 'HAVE_METADATA';
2663 this.media_ = playlist; // trigger media change if the active media has been updated
2664
2665 if (mediaChange) {
2666 this.trigger('mediachanging');
2667
2668 if (startingState === 'HAVE_MASTER') {
2669 // The initial playlist was a master manifest, and the first media selected was
2670 // also provided (in the form of a resolved playlist object) as part of the
2671 // source object (rather than just a URL). Therefore, since the media playlist
2672 // doesn't need to be requested, loadedmetadata won't trigger as part of the
2673 // normal flow, and needs an explicit trigger here.
2674 this.trigger('loadedmetadata');
2675 } else {
2676 this.trigger('mediachange');
2677 }
2678 }
2679
2680 return;
2681 } // switching to the active playlist is a no-op
2682
2683
2684 if (!mediaChange) {
2685 return;
2686 }
2687
2688 this.state = 'SWITCHING_MEDIA'; // there is already an outstanding playlist request
2689
2690 if (this.request) {
2691 if (playlist.resolvedUri === this.request.url) {
2692 // requesting to switch to the same playlist multiple times
2693 // has no effect after the first
2694 return;
2695 }
2696
2697 this.request.onreadystatechange = null;
2698 this.request.abort();
2699 this.request = null;
2700 } // request the new playlist
2701
2702
2703 if (this.media_) {
2704 this.trigger('mediachanging');
2705 }
2706
2707 this.request = this.vhs_.xhr({
2708 uri: playlist.resolvedUri,
2709 withCredentials: this.withCredentials
2710 }, function (error, req) {
2711 // disposed
2712 if (!_this3.request) {
2713 return;
2714 }
2715
2716 playlist.lastRequest = Date.now();
2717 playlist.resolvedUri = resolveManifestRedirect(_this3.handleManifestRedirects, playlist.resolvedUri, req);
2718
2719 if (error) {
2720 return _this3.playlistRequestError(_this3.request, playlist, startingState);
2721 }
2722
2723 _this3.haveMetadata({
2724 playlistString: req.responseText,
2725 url: playlist.uri,
2726 id: playlist.id
2727 }); // fire loadedmetadata the first time a media playlist is loaded
2728
2729
2730 if (startingState === 'HAVE_MASTER') {
2731 _this3.trigger('loadedmetadata');
2732 } else {
2733 _this3.trigger('mediachange');
2734 }
2735 });
2736 }
2737 /**
2738 * pause loading of the playlist
2739 */
2740 ;
2741
2742 _proto.pause = function pause() {
2743 this.stopRequest();
2744 window__default['default'].clearTimeout(this.mediaUpdateTimeout);
2745
2746 if (this.state === 'HAVE_NOTHING') {
2747 // If we pause the loader before any data has been retrieved, it's as if we never
2748 // started, so reset to an unstarted state.
2749 this.started = false;
2750 } // Need to restore state now that no activity is happening
2751
2752
2753 if (this.state === 'SWITCHING_MEDIA') {
2754 // if the loader was in the process of switching media, it should either return to
2755 // HAVE_MASTER or HAVE_METADATA depending on if the loader has loaded a media
2756 // playlist yet. This is determined by the existence of loader.media_
2757 if (this.media_) {
2758 this.state = 'HAVE_METADATA';
2759 } else {
2760 this.state = 'HAVE_MASTER';
2761 }
2762 } else if (this.state === 'HAVE_CURRENT_METADATA') {
2763 this.state = 'HAVE_METADATA';
2764 }
2765 }
2766 /**
2767 * start loading of the playlist
2768 */
2769 ;
2770
2771 _proto.load = function load(shouldDelay) {
2772 var _this4 = this;
2773
2774 window__default['default'].clearTimeout(this.mediaUpdateTimeout);
2775 var media = this.media();
2776
2777 if (shouldDelay) {
2778 var delay = media ? (media.partTargetDuration || media.targetDuration) / 2 * 1000 : 5 * 1000;
2779 this.mediaUpdateTimeout = window__default['default'].setTimeout(function () {
2780 return _this4.load();
2781 }, delay);
2782 return;
2783 }
2784
2785 if (!this.started) {
2786 this.start();
2787 return;
2788 }
2789
2790 if (media && !media.endList) {
2791 this.trigger('mediaupdatetimeout');
2792 } else {
2793 this.trigger('loadedplaylist');
2794 }
2795 }
2796 /**
2797 * start loading of the playlist
2798 */
2799 ;
2800
2801 _proto.start = function start() {
2802 var _this5 = this;
2803
2804 this.started = true;
2805
2806 if (typeof this.src === 'object') {
2807 // in the case of an entirely constructed manifest object (meaning there's no actual
2808 // manifest on a server), default the uri to the page's href
2809 if (!this.src.uri) {
2810 this.src.uri = window__default['default'].location.href;
2811 } // resolvedUri is added on internally after the initial request. Since there's no
2812 // request for pre-resolved manifests, add on resolvedUri here.
2813
2814
2815 this.src.resolvedUri = this.src.uri; // Since a manifest object was passed in as the source (instead of a URL), the first
2816 // request can be skipped (since the top level of the manifest, at a minimum, is
2817 // already available as a parsed manifest object). However, if the manifest object
2818 // represents a master playlist, some media playlists may need to be resolved before
2819 // the starting segment list is available. Therefore, go directly to setup of the
2820 // initial playlist, and let the normal flow continue from there.
2821 //
2822 // Note that the call to setup is asynchronous, as other sections of VHS may assume
2823 // that the first request is asynchronous.
2824
2825 setTimeout(function () {
2826 _this5.setupInitialPlaylist(_this5.src);
2827 }, 0);
2828 return;
2829 } // request the specified URL
2830
2831
2832 this.request = this.vhs_.xhr({
2833 uri: this.src,
2834 withCredentials: this.withCredentials
2835 }, function (error, req) {
2836 // disposed
2837 if (!_this5.request) {
2838 return;
2839 } // clear the loader's request reference
2840
2841
2842 _this5.request = null;
2843
2844 if (error) {
2845 _this5.error = {
2846 status: req.status,
2847 message: "HLS playlist request error at URL: " + _this5.src + ".",
2848 responseText: req.responseText,
2849 // MEDIA_ERR_NETWORK
2850 code: 2
2851 };
2852
2853 if (_this5.state === 'HAVE_NOTHING') {
2854 _this5.started = false;
2855 }
2856
2857 return _this5.trigger('error');
2858 }
2859
2860 _this5.src = resolveManifestRedirect(_this5.handleManifestRedirects, _this5.src, req);
2861 var manifest = parseManifest({
2862 manifestString: req.responseText,
2863 customTagParsers: _this5.customTagParsers,
2864 customTagMappers: _this5.customTagMappers,
2865 experimentalLLHLS: _this5.experimentalLLHLS
2866 });
2867
2868 _this5.setupInitialPlaylist(manifest);
2869 });
2870 };
2871
2872 _proto.srcUri = function srcUri() {
2873 return typeof this.src === 'string' ? this.src : this.src.uri;
2874 }
2875 /**
2876 * Given a manifest object that's either a master or media playlist, trigger the proper
2877 * events and set the state of the playlist loader.
2878 *
2879 * If the manifest object represents a master playlist, `loadedplaylist` will be
2880 * triggered to allow listeners to select a playlist. If none is selected, the loader
2881 * will default to the first one in the playlists array.
2882 *
2883 * If the manifest object represents a media playlist, `loadedplaylist` will be
2884 * triggered followed by `loadedmetadata`, as the only available playlist is loaded.
2885 *
2886 * In the case of a media playlist, a master playlist object wrapper with one playlist
2887 * will be created so that all logic can handle playlists in the same fashion (as an
2888 * assumed manifest object schema).
2889 *
2890 * @param {Object} manifest
2891 * The parsed manifest object
2892 */
2893 ;
2894
2895 _proto.setupInitialPlaylist = function setupInitialPlaylist(manifest) {
2896 this.state = 'HAVE_MASTER';
2897
2898 if (manifest.playlists) {
2899 this.master = manifest;
2900 addPropertiesToMaster(this.master, this.srcUri()); // If the initial master playlist has playlists with segments already resolved,
2901 // then resolve segment URIs in advance, as that is normally done after a playlist
2902 // request, which may never happen here since the playlists are pre-resolved.
2903
2904 manifest.playlists.forEach(function (playlist) {
2905 playlist.segments = getAllSegments(playlist);
2906 playlist.segments.forEach(function (segment) {
2907 resolveSegmentUris(segment, playlist.resolvedUri);
2908 });
2909 });
2910 this.trigger('loadedplaylist');
2911
2912 if (!this.request) {
2913 // no media playlist was specifically selected so start
2914 // from the first listed one
2915 this.media(this.master.playlists[0]);
2916 }
2917
2918 return;
2919 } // In order to support media playlists passed in as vhs-json, the case where the uri
2920 // is not provided as part of the manifest should be considered, and an appropriate
2921 // default used.
2922
2923
2924 var uri = this.srcUri() || window__default['default'].location.href;
2925 this.master = masterForMedia(manifest, uri);
2926 this.haveMetadata({
2927 playlistObject: manifest,
2928 url: uri,
2929 id: this.master.playlists[0].id
2930 });
2931 this.trigger('loadedmetadata');
2932 };
2933
2934 return PlaylistLoader;
2935 }(EventTarget$1);
2936
2937 /**
2938 * ranges
2939 *
2940 * Utilities for working with TimeRanges.
2941 *
2942 */
2943
2944 var TIME_FUDGE_FACTOR = 1 / 30; // Comparisons between time values such as current time and the end of the buffered range
2945 // can be misleading because of precision differences or when the current media has poorly
2946 // aligned audio and video, which can cause values to be slightly off from what you would
2947 // expect. This value is what we consider to be safe to use in such comparisons to account
2948 // for these scenarios.
2949
2950 var SAFE_TIME_DELTA = TIME_FUDGE_FACTOR * 3;
2951
2952 var filterRanges = function filterRanges(timeRanges, predicate) {
2953 var results = [];
2954 var i;
2955
2956 if (timeRanges && timeRanges.length) {
2957 // Search for ranges that match the predicate
2958 for (i = 0; i < timeRanges.length; i++) {
2959 if (predicate(timeRanges.start(i), timeRanges.end(i))) {
2960 results.push([timeRanges.start(i), timeRanges.end(i)]);
2961 }
2962 }
2963 }
2964
2965 return videojs__default['default'].createTimeRanges(results);
2966 };
2967 /**
2968 * Attempts to find the buffered TimeRange that contains the specified
2969 * time.
2970 *
2971 * @param {TimeRanges} buffered - the TimeRanges object to query
2972 * @param {number} time - the time to filter on.
2973 * @return {TimeRanges} a new TimeRanges object
2974 */
2975
2976
2977 var findRange = function findRange(buffered, time) {
2978 return filterRanges(buffered, function (start, end) {
2979 return start - SAFE_TIME_DELTA <= time && end + SAFE_TIME_DELTA >= time;
2980 });
2981 };
2982 /**
2983 * Returns the TimeRanges that begin later than the specified time.
2984 *
2985 * @param {TimeRanges} timeRanges - the TimeRanges object to query
2986 * @param {number} time - the time to filter on.
2987 * @return {TimeRanges} a new TimeRanges object.
2988 */
2989
2990 var findNextRange = function findNextRange(timeRanges, time) {
2991 return filterRanges(timeRanges, function (start) {
2992 return start - TIME_FUDGE_FACTOR >= time;
2993 });
2994 };
2995 /**
2996 * Returns gaps within a list of TimeRanges
2997 *
2998 * @param {TimeRanges} buffered - the TimeRanges object
2999 * @return {TimeRanges} a TimeRanges object of gaps
3000 */
3001
3002 var findGaps = function findGaps(buffered) {
3003 if (buffered.length < 2) {
3004 return videojs__default['default'].createTimeRanges();
3005 }
3006
3007 var ranges = [];
3008
3009 for (var i = 1; i < buffered.length; i++) {
3010 var start = buffered.end(i - 1);
3011 var end = buffered.start(i);
3012 ranges.push([start, end]);
3013 }
3014
3015 return videojs__default['default'].createTimeRanges(ranges);
3016 };
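// Illustrative sketch (not part of this bundle): a buffer covering [0, 10]
// and [15, 20] has a single gap of [10, 15]. `videojs` here is the imported
// video.js module.
//
//   var buffered = videojs.createTimeRanges([[0, 10], [15, 20]]);
//   findGaps(buffered); // TimeRanges equivalent to [[10, 15]]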
3017 /**
3018 * Calculate the intersection of two TimeRanges
3019 *
3020 * @param {TimeRanges} bufferA
3021 * @param {TimeRanges} bufferB
3022 * @return {TimeRanges} The intersection of `bufferA` with `bufferB`
3023 */
3024
3025 var bufferIntersection = function bufferIntersection(bufferA, bufferB) {
3026 var start = null;
3027 var end = null;
3028 var arity = 0;
3029 var extents = [];
3030 var ranges = [];
3031
3032 if (!bufferA || !bufferA.length || !bufferB || !bufferB.length) {
3033 return videojs__default['default'].createTimeRange();
3034 } // Handle the case where we have both buffers and create an
3035 // intersection of the two
3036
3037
3038 var count = bufferA.length; // A) Gather up all start and end times
3039
3040 while (count--) {
3041 extents.push({
3042 time: bufferA.start(count),
3043 type: 'start'
3044 });
3045 extents.push({
3046 time: bufferA.end(count),
3047 type: 'end'
3048 });
3049 }
3050
3051 count = bufferB.length;
3052
3053 while (count--) {
3054 extents.push({
3055 time: bufferB.start(count),
3056 type: 'start'
3057 });
3058 extents.push({
3059 time: bufferB.end(count),
3060 type: 'end'
3061 });
3062 } // B) Sort them by time
3063
3064
3065 extents.sort(function (a, b) {
3066 return a.time - b.time;
3067 }); // C) Go along one by one incrementing arity for start and decrementing
3068 // arity for ends
3069
3070 for (count = 0; count < extents.length; count++) {
3071 if (extents[count].type === 'start') {
3072 arity++; // D) If arity is ever incremented to 2 we are entering an
3073 // overlapping range
3074
3075 if (arity === 2) {
3076 start = extents[count].time;
3077 }
3078 } else if (extents[count].type === 'end') {
3079 arity--; // E) If arity is ever decremented to 1 we are leaving an
3080 // overlapping range
3081
3082 if (arity === 1) {
3083 end = extents[count].time;
3084 }
3085 } // F) Record overlapping ranges
3086
3087
3088 if (start !== null && end !== null) {
3089 ranges.push([start, end]);
3090 start = null;
3091 end = null;
3092 }
3093 }
3094
3095 return videojs__default['default'].createTimeRanges(ranges);
3096 };
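// Illustrative sketch (not part of this bundle) of the sweep over sorted
// start/end extents above:
//
//   var a = videojs.createTimeRanges([[0, 10], [20, 30]]);
//   var b = videojs.createTimeRanges([[5, 25]]);
//   bufferIntersection(a, b); // TimeRanges equivalent to [[5, 10], [20, 25]]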
3097 /**
3098 * Gets a human readable string for a TimeRange
3099 *
3100 * @param {TimeRange} range
3101 * @return {string} a human readable string
3102 */
3103
3104 var printableRange = function printableRange(range) {
3105 var strArr = [];
3106
3107 if (!range || !range.length) {
3108 return '';
3109 }
3110
3111 for (var i = 0; i < range.length; i++) {
3112 strArr.push(range.start(i) + ' => ' + range.end(i));
3113 }
3114
3115 return strArr.join(', ');
3116 };
3117 /**
3118 * Calculates the amount of time left in seconds until the player hits the end of the
3119 * buffer and causes a rebuffer
3120 *
3121 * @param {TimeRange} buffered
3122 * The state of the buffer
3123 * @param {number} currentTime
3124 * The current time of the player
3125 * @param {number} playbackRate
3126 * The current playback rate of the player. Defaults to 1.
3127 * @return {number}
3128 * Time until the player has to start rebuffering in seconds.
3129 * @function timeUntilRebuffer
3130 */
3131
3132 var timeUntilRebuffer = function timeUntilRebuffer(buffered, currentTime, playbackRate) {
3133 if (playbackRate === void 0) {
3134 playbackRate = 1;
3135 }
3136
3137 var bufferedEnd = buffered.length ? buffered.end(buffered.length - 1) : 0;
3138 return (bufferedEnd - currentTime) / playbackRate;
3139 };
3140 /**
3141 * Converts a TimeRanges object into an array representation
3142 *
3143 * @param {TimeRanges} timeRanges
3144 * @return {Array}
3145 */
3146
3147 var timeRangesToArray = function timeRangesToArray(timeRanges) {
3148 var timeRangesList = [];
3149
3150 for (var i = 0; i < timeRanges.length; i++) {
3151 timeRangesList.push({
3152 start: timeRanges.start(i),
3153 end: timeRanges.end(i)
3154 });
3155 }
3156
3157 return timeRangesList;
3158 };
3159 /**
3160 * Determines if two time range objects are different.
3161 *
3162 * @param {TimeRange} a
3163 * the first time range object to check
3164 *
3165 * @param {TimeRange} b
3166 * the second time range object to check
3167 *
3168 * @return {Boolean}
3169 * Whether the time range objects differ
3170 */
3171
3172 var isRangeDifferent = function isRangeDifferent(a, b) {
3173 // same object
3174 if (a === b) {
3175 return false;
3176 } // one or the other is undefined
3177
3178
3179 if (!a && b || !b && a) {
3180 return true;
3181 } // length is different
3182
3183
3184 if (a.length !== b.length) {
3185 return true;
3186 } // see if any start/end pair is different
3187
3188
3189 for (var i = 0; i < a.length; i++) {
3190 if (a.start(i) !== b.start(i) || a.end(i) !== b.end(i)) {
3191 return true;
3192 }
3193 } // if the length and every pair is the same
3194 // this is the same time range
3195
3196
3197 return false;
3198 };
3199
3200 var regexs = {
3201 // to determine mime types
3202 mp4: /^(av0?1|avc0?[1234]|vp0?9|flac|opus|mp3|mp4a|mp4v|stpp.ttml.im1t)/,
3203 webm: /^(vp0?[89]|av0?1|opus|vorbis)/,
3204 ogg: /^(vp0?[89]|theora|flac|opus|vorbis)/,
3205 // to determine if a codec is audio or video
3206 video: /^(av0?1|avc0?[1234]|vp0?[89]|hvc1|hev1|theora|mp4v)/,
3207 audio: /^(mp4a|flac|vorbis|opus|ac-[34]|ec-3|alac|mp3|speex|aac)/,
3208 text: /^(stpp.ttml.im1t)/,
3209 // mux.js support regex
3210 muxerVideo: /^(avc0?1)/,
3211 muxerAudio: /^(mp4a)/,
3212 // match nothing as muxer does not support text right now.
3213 // there can never be a character before the start of a string
3214 // so this matches nothing.
3215 muxerText: /a^/
3216 };
3217 var mediaTypes = ['video', 'audio', 'text'];
3218 var upperMediaTypes = ['Video', 'Audio', 'Text'];
3219 /**
3220 * Replace the old apple-style `avc1.<dd>.<dd>` codec string with the standard
3221 * `avc1.<hhhhhh>`
3222 *
3223 * @param {string} codec
3224 * Codec string to translate
3225 * @return {string}
3226 * The translated codec string
3227 */
3228
3229 var translateLegacyCodec = function translateLegacyCodec(codec) {
3230 if (!codec) {
3231 return codec;
3232 }
3233
3234 return codec.replace(/avc1\.(\d+)\.(\d+)/i, function (orig, profile, avcLevel) {
3235 var profileHex = ('00' + Number(profile).toString(16)).slice(-2);
3236 var avcLevelHex = ('00' + Number(avcLevel).toString(16)).slice(-2);
3237 return 'avc1.' + profileHex + '00' + avcLevelHex;
3238 });
3239 };
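// Illustrative sketch (not part of this bundle): the decimal profile/level
// pair is re-encoded as two hex bytes around a constraint byte of 00.
//
//   translateLegacyCodec('avc1.100.31'); // 'avc1.64001f' (100 -> 0x64, 31 -> 0x1f)
//   translateLegacyCodec('mp4a.40.2');   // unchanged; not a legacy avc1 string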
3240 /**
3241 * @typedef {Object} ParsedCodecInfo
3242 * @property {number} codecCount
3243 * Number of codecs parsed
3244 * @property {string} [videoCodec]
3245 * Parsed video codec (if found)
3246 * @property {string} [videoObjectTypeIndicator]
3247 * Video object type indicator (if found)
3248 * @property {string|null} audioProfile
3249 * Audio profile
3250 */
3251
3252 /**
3253 * Parses a codec string into the individual codecs it specifies, determining for
3254 * each one its type prefix, detail string, and media type.
3255 *
3256 * @param {string} [codecString]
3257 * The codec string to parse
3258 * @return {Object[]}
3259 * An array of parsed codec objects with type, details, and mediaType properties
3260 */
3261
3262 var parseCodecs = function parseCodecs(codecString) {
3263 if (codecString === void 0) {
3264 codecString = '';
3265 }
3266
3267 var codecs = codecString.split(',');
3268 var result = [];
3269 codecs.forEach(function (codec) {
3270 codec = codec.trim();
3271 var codecType;
3272 mediaTypes.forEach(function (name) {
3273 var match = regexs[name].exec(codec.toLowerCase());
3274
3275 if (!match || match.length <= 1) {
3276 return;
3277 }
3278
3279 codecType = name; // maintain codec case
3280
3281 var type = codec.substring(0, match[1].length);
3282 var details = codec.replace(type, '');
3283 result.push({
3284 type: type,
3285 details: details,
3286 mediaType: name
3287 });
3288 });
3289
3290 if (!codecType) {
3291 result.push({
3292 type: codec,
3293 details: '',
3294 mediaType: 'unknown'
3295 });
3296 }
3297 });
3298 return result;
3299 };
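// Illustrative sketch (not part of this bundle):
//
//   parseCodecs('avc1.4d400d, mp4a.40.2');
//   // [{ type: 'avc1', details: '.4d400d', mediaType: 'video' },
//   //  { type: 'mp4a', details: '.40.2', mediaType: 'audio' }]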
3300 /**
3301 * Returns parsed codec info for the default alternate audio playlist if there is
3302 * a default alternate audio playlist for the provided audio group.
3303 *
3304 * @param {Object} master
3305 * The master playlist
3306 * @param {string} audioGroupId
3307 * ID of the audio group for which to find the default codec info
3308 * @return {Object[]|null}
3309 * Parsed codec info for the default playlist, or null if none is found
3310 */
3311
3312 var codecsFromDefault = function codecsFromDefault(master, audioGroupId) {
3313 if (!master.mediaGroups.AUDIO || !audioGroupId) {
3314 return null;
3315 }
3316
3317 var audioGroup = master.mediaGroups.AUDIO[audioGroupId];
3318
3319 if (!audioGroup) {
3320 return null;
3321 }
3322
3323 for (var name in audioGroup) {
3324 var audioType = audioGroup[name];
3325
3326 if (audioType.default && audioType.playlists) {
3327 // codec should be the same for all playlists within the audio type
3328 return parseCodecs(audioType.playlists[0].attributes.CODECS);
3329 }
3330 }
3331
3332 return null;
3333 };
3334 var isAudioCodec = function isAudioCodec(codec) {
3335 if (codec === void 0) {
3336 codec = '';
3337 }
3338
3339 return regexs.audio.test(codec.trim().toLowerCase());
3340 };
3341 var isTextCodec = function isTextCodec(codec) {
3342 if (codec === void 0) {
3343 codec = '';
3344 }
3345
3346 return regexs.text.test(codec.trim().toLowerCase());
3347 };
3348 var getMimeForCodec = function getMimeForCodec(codecString) {
3349 if (!codecString || typeof codecString !== 'string') {
3350 return;
3351 }
3352
3353 var codecs = codecString.toLowerCase().split(',').map(function (c) {
3354 return translateLegacyCodec(c.trim());
3355 }); // default to video type
3356
3357 var type = 'video'; // only change to audio type if the only codec we have is
3358 // audio
3359
3360 if (codecs.length === 1 && isAudioCodec(codecs[0])) {
3361 type = 'audio';
3362 } else if (codecs.length === 1 && isTextCodec(codecs[0])) {
3363 // text uses application/<container> for now
3364 type = 'application';
3365 } // default the container to mp4
3366
3367
3368 var container = 'mp4'; // every codec must be able to go into the container
3369 // for that container to be the correct one
3370
3371 if (codecs.every(function (c) {
3372 return regexs.mp4.test(c);
3373 })) {
3374 container = 'mp4';
3375 } else if (codecs.every(function (c) {
3376 return regexs.webm.test(c);
3377 })) {
3378 container = 'webm';
3379 } else if (codecs.every(function (c) {
3380 return regexs.ogg.test(c);
3381 })) {
3382 container = 'ogg';
3383 }
3384
3385 return type + "/" + container + ";codecs=\"" + codecString + "\"";
3386 };
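// Illustrative sketch (not part of this bundle):
//
//   getMimeForCodec('avc1.4d400d,mp4a.40.2'); // 'video/mp4;codecs="avc1.4d400d,mp4a.40.2"'
//   getMimeForCodec('mp4a.40.2');             // 'audio/mp4;codecs="mp4a.40.2"'
//   getMimeForCodec('vp8,vorbis');            // 'video/webm;codecs="vp8,vorbis"'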
3387 var browserSupportsCodec = function browserSupportsCodec(codecString) {
3388 if (codecString === void 0) {
3389 codecString = '';
3390 }
3391
3392 return window__default['default'].MediaSource && window__default['default'].MediaSource.isTypeSupported && window__default['default'].MediaSource.isTypeSupported(getMimeForCodec(codecString)) || false;
3393 };
3394 var muxerSupportsCodec = function muxerSupportsCodec(codecString) {
3395 if (codecString === void 0) {
3396 codecString = '';
3397 }
3398
3399 return codecString.toLowerCase().split(',').every(function (codec) {
3400 codec = codec.trim(); // any match is supported.
3401
3402 for (var i = 0; i < upperMediaTypes.length; i++) {
3403 var type = upperMediaTypes[i];
3404
3405 if (regexs["muxer" + type].test(codec)) {
3406 return true;
3407 }
3408 }
3409
3410 return false;
3411 });
3412 };
3413 var DEFAULT_AUDIO_CODEC = 'mp4a.40.2';
3414 var DEFAULT_VIDEO_CODEC = 'avc1.4d400d';
3415
3416 /**
3417 * @file playlist.js
3418 *
3419 * Playlist related utilities.
3420 */
3421 var createTimeRange = videojs__default['default'].createTimeRange;
3422 /**
3423 * A function to get a combined list of parts and segments with durations
3424 * and indexes.
3425 *
3426 * @param {Playlist} playlist the playlist to get the list for.
3427 *
3428 * @return {Array} The part/segment list.
3429 */
3430
3431 var getPartsAndSegments = function getPartsAndSegments(playlist) {
3432 return (playlist.segments || []).reduce(function (acc, segment, si) {
3433 if (segment.parts) {
3434 segment.parts.forEach(function (part, pi) {
3435 acc.push({
3436 duration: part.duration,
3437 segmentIndex: si,
3438 partIndex: pi
3439 });
3440 });
3441 } else {
3442 acc.push({
3443 duration: segment.duration,
3444 segmentIndex: si,
3445 partIndex: null
3446 });
3447 }
3448
3449 return acc;
3450 }, []);
3451 };
3452 /**
3453 * Get the number of seconds to delay from the end of a
3454 * live playlist.
3455 *
3456 * @param {Playlist} master the master playlist
3457 * @param {Playlist} media the media playlist
3458 * @return {number} the hold back in seconds.
3459 */
3460
3461
3462 var liveEdgeDelay = function liveEdgeDelay(master, media) {
3463 if (media.endList) {
3464 return 0;
3465 } // dash suggestedPresentationDelay trumps everything
3466
3467
3468 if (master && master.suggestedPresentationDelay) {
3469 return master.suggestedPresentationDelay;
3470 }
3471
3472 var lastSegment = media.segments && media.segments.length && media.segments[media.segments.length - 1];
3473 var hasParts = lastSegment && lastSegment.parts && lastSegment.parts.length; // look for "part" delays from ll-hls first
3474
3475 if (hasParts && media.serverControl && media.serverControl.partHoldBack) {
3476 return media.serverControl.partHoldBack;
3477 } else if (hasParts && media.partTargetDuration) {
3478 return media.partTargetDuration * 3; // finally look for full segment delays
3479 } else if (media.serverControl && media.serverControl.holdBack) {
3480 return media.serverControl.holdBack;
3481 } else if (media.targetDuration) {
3482 return media.targetDuration * 3;
3483 }
3484
3485 return 0;
3486 };
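// Illustrative sketch (not part of this bundle): with no serverControl hints
// and no parts, a live playlist is held back three target durations.
//
//   liveEdgeDelay(null, { endList: false, targetDuration: 6, segments: [] }); // 18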
3487 /**
3488 * walk backward until we find a duration we can use
3489 * or return a failure
3490 *
3491 * @param {Playlist} playlist the playlist to walk through
3492 * @param {Number} endSequence the mediaSequence to stop walking on
3493 */
3494
3495 var backwardDuration = function backwardDuration(playlist, endSequence) {
3496 var result = 0;
3497 var i = endSequence - playlist.mediaSequence; // if a start time is available for the segment immediately following
3498 // the interval, use it
3499
3500 var segment = playlist.segments[i]; // Walk backward until we find the latest segment with timeline
3501 // information that is earlier than endSequence
3502
3503 if (segment) {
3504 if (typeof segment.start !== 'undefined') {
3505 return {
3506 result: segment.start,
3507 precise: true
3508 };
3509 }
3510
3511 if (typeof segment.end !== 'undefined') {
3512 return {
3513 result: segment.end - segment.duration,
3514 precise: true
3515 };
3516 }
3517 }
3518
3519 while (i--) {
3520 segment = playlist.segments[i];
3521
3522 if (typeof segment.end !== 'undefined') {
3523 return {
3524 result: result + segment.end,
3525 precise: true
3526 };
3527 }
3528
3529 result += segment.duration;
3530
3531 if (typeof segment.start !== 'undefined') {
3532 return {
3533 result: result + segment.start,
3534 precise: true
3535 };
3536 }
3537 }
3538
3539 return {
3540 result: result,
3541 precise: false
3542 };
3543 };
3544 /**
3545 * walk forward until we find a duration we can use
3546 * or return a failure
3547 *
3548 * @param {Playlist} playlist the playlist to walk through
3549 * @param {number} endSequence the mediaSequence to stop walking on
3550 */
3551
3552
3553 var forwardDuration = function forwardDuration(playlist, endSequence) {
3554 var result = 0;
3555 var segment;
3556 var i = endSequence - playlist.mediaSequence; // Walk forward until we find the earliest segment with timeline
3557 // information
3558
3559 for (; i < playlist.segments.length; i++) {
3560 segment = playlist.segments[i];
3561
3562 if (typeof segment.start !== 'undefined') {
3563 return {
3564 result: segment.start - result,
3565 precise: true
3566 };
3567 }
3568
3569 result += segment.duration;
3570
3571 if (typeof segment.end !== 'undefined') {
3572 return {
3573 result: segment.end - result,
3574 precise: true
3575 };
3576 }
3577 } // indicate we didn't find a useful duration estimate
3578
3579
3580 return {
3581 result: -1,
3582 precise: false
3583 };
3584 };
3585 /**
3586 * Calculate the media duration from the segments associated with a
3587 * playlist. The duration of a subinterval of the available segments
3588 * may be calculated by specifying an end index.
3589 *
3590 * @param {Object} playlist a media playlist object
3591 * @param {number=} endSequence an exclusive upper boundary
3592 * for the playlist. Defaults to playlist length.
3593 * @param {number} expired the amount of time that has dropped
3594 * off the front of the playlist in a live scenario
3595 * @return {number} the duration between the first available segment
3596 * and end index.
3597 */
3598
3599
3600 var intervalDuration = function intervalDuration(playlist, endSequence, expired) {
3601 if (typeof endSequence === 'undefined') {
3602 endSequence = playlist.mediaSequence + playlist.segments.length;
3603 }
3604
3605 if (endSequence < playlist.mediaSequence) {
3606 return 0;
3607 } // do a backward walk to estimate the duration
3608
3609
3610 var backward = backwardDuration(playlist, endSequence);
3611
3612 if (backward.precise) {
3613 // if we were able to base our duration estimate on timing
3614 // information provided directly from the Media Source, return
3615 // it
3616 return backward.result;
3617 } // walk forward to see if a precise duration estimate can be made
3618 // that way
3619
3620
3621 var forward = forwardDuration(playlist, endSequence);
3622
3623 if (forward.precise) {
3624 // we found a segment that has been buffered and so it's
3625 // position is known precisely
3626 return forward.result;
3627 } // return the less-precise, playlist-based duration estimate
3628
3629
3630 return backward.result + expired;
3631 };
3632 /**
3633 * Calculates the duration of a playlist. If a start and end index
3634 * are specified, the duration will be for the subset of the media
3635 * timeline between those two indices. The total duration for live
3636 * playlists is always Infinity.
3637 *
3638 * @param {Object} playlist a media playlist object
3639 * @param {number=} endSequence an exclusive upper
3640 * boundary for the playlist. Defaults to the playlist media
3641 * sequence number plus its length.
3642 * @param {number=} expired the amount of time that has
3643 * dropped off the front of the playlist in a live scenario
3644 * @return {number} the duration between the start index and end
3645 * index.
3646 */
3647
3648
3649 var duration = function duration(playlist, endSequence, expired) {
3650 if (!playlist) {
3651 return 0;
3652 }
3653
3654 if (typeof expired !== 'number') {
3655 expired = 0;
3656 } // if a slice of the total duration is not requested, use
3657 // playlist-level duration indicators when they're present
3658
3659
3660 if (typeof endSequence === 'undefined') {
3661 // if present, use the duration specified in the playlist
3662 if (playlist.totalDuration) {
3663 return playlist.totalDuration;
3664 } // duration should be Infinity for live playlists
3665
3666
3667 if (!playlist.endList) {
3668 return window__default['default'].Infinity;
3669 }
3670 } // calculate the total duration based on the segment durations
3671
3672
3673 return intervalDuration(playlist, endSequence, expired);
3674 };
3675 /**
3676 * Calculate the time between two indexes in the current playlist.
3677 * Neither the start index nor the end index needs to be within the current
3678 * playlist; in that case, the targetDuration of the playlist is used
3679 * to approximate the durations of the segments
3680 *
3681 * @param {Object} playlist a media playlist object
3682 * @param {number} startIndex
3683 * @param {number} endIndex
3684 * @return {number} the number of seconds between startIndex and endIndex
3685 */
3686
3687 var sumDurations = function sumDurations(playlist, startIndex, endIndex) {
3688 var durations = 0;
3689
3690 if (startIndex > endIndex) {
3691 var _ref = [endIndex, startIndex];
3692 startIndex = _ref[0];
3693 endIndex = _ref[1];
3694 }
3695
3696 if (startIndex < 0) {
3697 for (var i = startIndex; i < Math.min(0, endIndex); i++) {
3698 durations += playlist.targetDuration;
3699 }
3700
3701 startIndex = 0;
3702 }
3703
3704 for (var _i = startIndex; _i < endIndex; _i++) {
3705 durations += playlist.segments[_i].duration;
3706 }
3707
3708 return durations;
3709 };
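// Illustrative sketch (not part of this bundle): indexes below zero fall
// before the start of the playlist and are approximated with targetDuration.
//
//   var playlist = { targetDuration: 6, segments: [{ duration: 5 }, { duration: 7 }] };
//   sumDurations(playlist, 0, 2);  // 12 (5 + 7)
//   sumDurations(playlist, -1, 1); // 11 (one 6s estimate, then the 5s segment)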
3710 /**
3711 * Calculates the playlist end time
3712 *
3713 * @param {Object} playlist a media playlist object
3714 * @param {number=} expired the amount of time that has
3715 * dropped off the front of the playlist in a live scenario
3716 * @param {boolean} useSafeLiveEnd a boolean value indicating whether or not the
3717 * playlist end calculation should consider the safe live end
3718 * (truncate the playlist end by three segments). This is normally
3719 * used for calculating the end of the playlist's seekable range.
3720 * This takes into account the value of liveEdgePadding.
3721 * Setting liveEdgePadding to 0 is equivalent to setting this to false.
3722 * @param {number} liveEdgePadding a number indicating how far from the end of the playlist we should be in seconds.
3723 * If this is provided, it is used in the safe live end calculation.
3724 * Setting useSafeLiveEnd=false or liveEdgePadding=0 are equivalent.
3725 * Corresponds to suggestedPresentationDelay in DASH manifests.
3726 * @return {number} the end time of playlist
3727 * @function playlistEnd
3728 */
3729
3730 var playlistEnd = function playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding) {
3731 if (!playlist || !playlist.segments) {
3732 return null;
3733 }
3734
3735 if (playlist.endList) {
3736 return duration(playlist);
3737 }
3738
3739 if (expired === null) {
3740 return null;
3741 }
3742
3743 expired = expired || 0;
3744 var lastSegmentTime = intervalDuration(playlist, playlist.mediaSequence + playlist.segments.length, expired);
3745
3746 if (useSafeLiveEnd) {
3747 liveEdgePadding = typeof liveEdgePadding === 'number' ? liveEdgePadding : liveEdgeDelay(null, playlist);
3748 lastSegmentTime -= liveEdgePadding;
3749 } // don't return a time less than zero
3750
3751
3752 return Math.max(0, lastSegmentTime);
3753 };
3754 /**
3755 * Calculates the interval of time that is currently seekable in a
3756 * playlist. The returned time ranges are relative to the earliest
3757 * moment in the specified playlist that is still available. A full
3758 * seekable implementation for live streams would need to offset
3759 * these values by the duration of content that has expired from the
3760 * stream.
3761 *
3762 * @param {Object} playlist a media playlist object
3764 * @param {number=} expired the amount of time that has
3765 * dropped off the front of the playlist in a live scenario
3766 * @param {number} liveEdgePadding how far from the end of the playlist we should be in seconds.
3767 * Corresponds to suggestedPresentationDelay in DASH manifests.
3768 * @return {TimeRanges} the periods of time that are valid targets
3769 * for seeking
3770 */
3771
3772 var seekable = function seekable(playlist, expired, liveEdgePadding) {
3773 var useSafeLiveEnd = true;
3774 var seekableStart = expired || 0;
3775 var seekableEnd = playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding);
3776
3777 if (seekableEnd === null) {
3778 return createTimeRange();
3779 }
3780
3781 return createTimeRange(seekableStart, seekableEnd);
3782 };
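// A sketch of the live seekable window (hypothetical numbers, assuming
// the interval duration resolves to the expired time plus the summed
// segment durations): the range runs from the expired time to the
// playlist end minus the live edge padding.
var exampleSeekable = function exampleSeekable() {
  var live = {
    endList: false,
    mediaSequence: 10,
    targetDuration: 10,
    segments: [{ duration: 10 }, { duration: 10 }, { duration: 10 }]
  };
  // roughly createTimeRange(100, 100 + 30 - 15)
  return seekable(live, 100, 15);
};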
3783 /**
3784 * Determine the index and estimated starting time of the segment that
3785 * contains a specified playback position in a media playlist.
3786 *
3787 * @param {Object} playlist the media playlist to query
3788 * @param {number} currentTime The number of seconds since the earliest
3789 * possible position to determine the containing segment for
 * @param {number} startIndex the index of a known, already-timed segment to search from
 * @param {number} startTime the playback start time of the segment at startIndex
3792 * @return {Object}
3793 */
3794
3795 var getMediaInfoForTime = function getMediaInfoForTime(playlist, currentTime, startIndex, startTime) {
3796 var partsAndSegments = getPartsAndSegments(playlist);
3797 var time = currentTime - startTime;
3798
3799 if (time < 0) {
3800 // Walk backward from startIndex in the playlist, adding durations
3801 // until we find a segment that contains `time` and return it
3802 if (startIndex > 0) {
3803 for (var i = startIndex - 1; i >= 0; i--) {
3804 var segment = partsAndSegments[i];
3805 time += segment.duration + TIME_FUDGE_FACTOR;
3806
3807 if (time > 0) {
3808 return {
3809 mediaIndex: segment.segmentIndex,
3810 startTime: startTime - sumDurations(playlist, startIndex, segment.segmentIndex),
3811 partIndex: segment.partIndex
3812 };
3813 }
3814 }
3815 } // We were unable to find a good segment within the playlist
3816 // so select the first segment
3817
3818
3819 return {
3820 mediaIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
3821 partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
3822 startTime: currentTime
3823 };
3824 } // When startIndex is negative, we first walk forward to first segment
3825 // adding target durations. If we "run out of time" before getting to
3826 // the first segment, return the first segment
3827
3828
3829 if (startIndex < 0) {
3830 for (var _i2 = startIndex; _i2 < 0; _i2++) {
3831 time -= playlist.targetDuration;
3832
3833 if (time < 0) {
3834 return {
3835 mediaIndex: partsAndSegments[0].segmentIndex,
3836 startTime: currentTime
3837 };
3838 }
3839 }
3840
3841 startIndex = 0;
3842 } // Walk forward from startIndex in the playlist, subtracting durations
3843 // until we find a segment that contains `time` and return it
3844
3845
3846 for (var _i3 = startIndex; _i3 < partsAndSegments.length; _i3++) {
3847 var partSegment = partsAndSegments[_i3];
3848 time -= partSegment.duration + TIME_FUDGE_FACTOR;
3849
3850 if (time < 0) {
3851 return {
3852 mediaIndex: partSegment.segmentIndex,
3853 startTime: startTime + sumDurations(playlist, startIndex, partSegment.segmentIndex),
3854 partIndex: partSegment.partIndex
3855 };
3856 }
3857 } // We are out of possible candidates so load the last one...
3858
3859
3860 return {
3861 mediaIndex: partsAndSegments[partsAndSegments.length - 1].segmentIndex,
3862 partIndex: partsAndSegments[partsAndSegments.length - 1].partIndex,
3863 startTime: currentTime
3864 };
3865 };
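// A sketch of locating a playback position (hypothetical playlist with
// plain, non-partial segments; not invoked by the library): walking
// forward from index 0, 12 seconds lands in the second 10 second
// segment, which starts at time 10.
var exampleGetMediaInfoForTime = function exampleGetMediaInfoForTime() {
  var playlist = {
    mediaSequence: 0,
    segments: [{ duration: 10 }, { duration: 10 }, { duration: 10 }]
  };
  // => { mediaIndex: 1, startTime: 10, partIndex: null }, give or take
  // the small fudge factor added to each segment duration
  return getMediaInfoForTime(playlist, 12, 0, 0);
};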
3866 /**
3867 * Check whether the playlist is blacklisted or not.
3868 *
3869 * @param {Object} playlist the media playlist object
3870 * @return {boolean} whether the playlist is blacklisted or not
3871 * @function isBlacklisted
3872 */
3873
3874 var isBlacklisted = function isBlacklisted(playlist) {
3875 return playlist.excludeUntil && playlist.excludeUntil > Date.now();
3876 };
3877 /**
3878 * Check whether the playlist is compatible with current playback configuration or has
3879 * been blacklisted permanently for being incompatible.
3880 *
3881 * @param {Object} playlist the media playlist object
3882 * @return {boolean} whether the playlist is incompatible or not
3883 * @function isIncompatible
3884 */
3885
3886 var isIncompatible = function isIncompatible(playlist) {
3887 return playlist.excludeUntil && playlist.excludeUntil === Infinity;
3888 };
3889 /**
3890 * Check whether the playlist is enabled or not.
3891 *
3892 * @param {Object} playlist the media playlist object
3893 * @return {boolean} whether the playlist is enabled or not
3894 * @function isEnabled
3895 */
3896
3897 var isEnabled = function isEnabled(playlist) {
3898 var blacklisted = isBlacklisted(playlist);
3899 return !playlist.disabled && !blacklisted;
3900 };
3901 /**
3902 * Check whether the playlist has been manually disabled through the representations api.
3903 *
3904 * @param {Object} playlist the media playlist object
3905 * @return {boolean} whether the playlist is disabled manually or not
3906 * @function isDisabled
3907 */
3908
3909 var isDisabled = function isDisabled(playlist) {
3910 return playlist.disabled;
3911 };
3912 /**
3913 * Returns whether the current playlist is an AES encrypted HLS stream
3914 *
 * @param {Object} media the media playlist object
 * @return {boolean} true if it's an AES encrypted HLS stream
3916 */
3917
3918 var isAes = function isAes(media) {
3919 for (var i = 0; i < media.segments.length; i++) {
3920 if (media.segments[i].key) {
3921 return true;
3922 }
3923 }
3924
3925 return false;
3926 };
3927 /**
3928 * Checks if the playlist has a value for the specified attribute
3929 *
3930 * @param {string} attr
3931 * Attribute to check for
3932 * @param {Object} playlist
3933 * The media playlist object
3934 * @return {boolean}
3935 * Whether the playlist contains a value for the attribute or not
3936 * @function hasAttribute
3937 */
3938
3939 var hasAttribute = function hasAttribute(attr, playlist) {
3940 return playlist.attributes && playlist.attributes[attr];
3941 };
3942 /**
3943 * Estimates the time required to complete a segment download from the specified playlist
3944 *
3945 * @param {number} segmentDuration
3946 * Duration of requested segment
3947 * @param {number} bandwidth
3948 * Current measured bandwidth of the player
3949 * @param {Object} playlist
3950 * The media playlist object
3951 * @param {number=} bytesReceived
3952 * Number of bytes already received for the request. Defaults to 0
3953 * @return {number|NaN}
3954 * The estimated time to request the segment. NaN if bandwidth information for
3955 * the given playlist is unavailable
3956 * @function estimateSegmentRequestTime
3957 */
3958
3959 var estimateSegmentRequestTime = function estimateSegmentRequestTime(segmentDuration, bandwidth, playlist, bytesReceived) {
3960 if (bytesReceived === void 0) {
3961 bytesReceived = 0;
3962 }
3963
3964 if (!hasAttribute('BANDWIDTH', playlist)) {
3965 return NaN;
3966 }
3967
3968 var size = segmentDuration * playlist.attributes.BANDWIDTH;
3969 return (size - bytesReceived * 8) / bandwidth;
3970 };
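// A worked sketch with hypothetical numbers: a 10 second segment in a
// 2 Mbps rendition is about 20,000,000 bits. With 250,000 bytes
// (2,000,000 bits) already received and a measured bandwidth of 4 Mbps,
// the remaining download time is (20e6 - 2e6) / 4e6 = 4.5 seconds.
var exampleEstimateRequestTime = function exampleEstimateRequestTime() {
  var playlist = { attributes: { BANDWIDTH: 2000000 } };
  return estimateSegmentRequestTime(10, 4000000, playlist, 250000); // => 4.5
};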
/**
 * Returns whether the current playlist is the lowest enabled rendition
 *
 * @param {Object} master the master playlist object
 * @param {Object} media the current media playlist object
 * @return {boolean} true if on the lowest enabled rendition
 */
3976
3977 var isLowestEnabledRendition = function isLowestEnabledRendition(master, media) {
3978 if (master.playlists.length === 1) {
3979 return true;
3980 }
3981
3982 var currentBandwidth = media.attributes.BANDWIDTH || Number.MAX_VALUE;
3983 return master.playlists.filter(function (playlist) {
3984 if (!isEnabled(playlist)) {
3985 return false;
3986 }
3987
3988 return (playlist.attributes.BANDWIDTH || 0) < currentBandwidth;
3989 }).length === 0;
3990 };
3991 var playlistMatch = function playlistMatch(a, b) {
// if both playlists are null, or only one is non-null,
// there is no match
3995 if (!a && !b || !a && b || a && !b) {
3996 return false;
3997 } // playlist objects are the same, match
3998
3999
4000 if (a === b) {
4001 return true;
4002 } // first try to use id as it should be the most
4003 // accurate
4004
4005
4006 if (a.id && b.id && a.id === b.id) {
4007 return true;
} // next try to use resolvedUri as it should be the
4009 // second most accurate.
4010
4011
4012 if (a.resolvedUri && b.resolvedUri && a.resolvedUri === b.resolvedUri) {
4013 return true;
4014 } // finally try to use uri as it should be accurate
4015 // but might miss a few cases for relative uris
4016
4017
4018 if (a.uri && b.uri && a.uri === b.uri) {
4019 return true;
4020 }
4021
4022 return false;
4023 };
4024
4025 var someAudioVariant = function someAudioVariant(master, callback) {
4026 var AUDIO = master && master.mediaGroups && master.mediaGroups.AUDIO || {};
4027 var found = false;
4028
4029 for (var groupName in AUDIO) {
4030 for (var label in AUDIO[groupName]) {
4031 found = callback(AUDIO[groupName][label]);
4032
4033 if (found) {
4034 break;
4035 }
4036 }
4037
4038 if (found) {
4039 break;
4040 }
4041 }
4042
4043 return !!found;
4044 };
4045
4046 var isAudioOnly = function isAudioOnly(master) {
4047 // we are audio only if we have no main playlists but do
4048 // have media group playlists.
4049 if (!master || !master.playlists || !master.playlists.length) {
// if at least one audio group variant has playlists or a uri,
// this is an audio-only master; otherwise it is not.
4052 var found = someAudioVariant(master, function (variant) {
4053 return variant.playlists && variant.playlists.length || variant.uri;
4054 });
4055 return found;
4056 } // if every playlist has only an audio codec it is audio only
4057
4058
4059 var _loop = function _loop(i) {
4060 var playlist = master.playlists[i];
4061 var CODECS = playlist.attributes && playlist.attributes.CODECS; // all codecs are audio, this is an audio playlist.
4062
4063 if (CODECS && CODECS.split(',').every(function (c) {
4064 return isAudioCodec(c);
4065 })) {
4066 return "continue";
} // if the playlist is in an audio group, it is audio only
4068
4069
4070 var found = someAudioVariant(master, function (variant) {
4071 return playlistMatch(playlist, variant);
4072 });
4073
4074 if (found) {
4075 return "continue";
4076 } // if we make it here this playlist isn't audio and we
4077 // are not audio only
4078
4079
4080 return {
4081 v: false
4082 };
4083 };
4084
4085 for (var i = 0; i < master.playlists.length; i++) {
4086 var _ret = _loop(i);
4087
4088 if (_ret === "continue") continue;
4089 if (typeof _ret === "object") return _ret.v;
4090 } // if we make it past every playlist without returning, then
// this is an audio-only master.
4092
4093
4094 return true;
4095 }; // exports
4096
4097 var Playlist = {
4098 liveEdgeDelay: liveEdgeDelay,
4099 duration: duration,
4100 seekable: seekable,
4101 getMediaInfoForTime: getMediaInfoForTime,
4102 isEnabled: isEnabled,
4103 isDisabled: isDisabled,
4104 isBlacklisted: isBlacklisted,
4105 isIncompatible: isIncompatible,
4106 playlistEnd: playlistEnd,
4107 isAes: isAes,
4108 hasAttribute: hasAttribute,
4109 estimateSegmentRequestTime: estimateSegmentRequestTime,
4110 isLowestEnabledRendition: isLowestEnabledRendition,
4111 isAudioOnly: isAudioOnly,
4112 playlistMatch: playlistMatch
4113 };
4114
4115 /**
4116 * @file xhr.js
4117 */
4118 var videojsXHR = videojs__default['default'].xhr,
4119 mergeOptions$1 = videojs__default['default'].mergeOptions;
4120
4121 var callbackWrapper = function callbackWrapper(request, error, response, callback) {
4122 var reqResponse = request.responseType === 'arraybuffer' ? request.response : request.responseText;
4123
4124 if (!error && reqResponse) {
4125 request.responseTime = Date.now();
4126 request.roundTripTime = request.responseTime - request.requestTime;
4127 request.bytesReceived = reqResponse.byteLength || reqResponse.length;
4128
4129 if (!request.bandwidth) {
4130 request.bandwidth = Math.floor(request.bytesReceived / request.roundTripTime * 8 * 1000);
4131 }
4132 }
4133
4134 if (response.headers) {
4135 request.responseHeaders = response.headers;
4136 } // videojs.xhr now uses a specific code on the error
4137 // object to signal that a request has timed out instead
4138 // of setting a boolean on the request object
4139
4140
4141 if (error && error.code === 'ETIMEDOUT') {
4142 request.timedout = true;
4143 } // videojs.xhr no longer considers status codes outside of 200 and 0
4144 // (for file uris) to be errors, but the old XHR did, so emulate that
4145 // behavior. Status 206 may be used in response to byterange requests.
4146
4147
4148 if (!error && !request.aborted && response.statusCode !== 200 && response.statusCode !== 206 && response.statusCode !== 0) {
4149 error = new Error('XHR Failed with a response of: ' + (request && (reqResponse || request.responseText)));
4150 }
4151
4152 callback(error, request);
4153 };
4154
4155 var xhrFactory = function xhrFactory() {
4156 var xhr = function XhrFunction(options, callback) {
4157 // Add a default timeout
4158 options = mergeOptions$1({
4159 timeout: 45e3
4160 }, options); // Allow an optional user-specified function to modify the option
4161 // object before we construct the xhr request
4162
4163 var beforeRequest = XhrFunction.beforeRequest || videojs__default['default'].Vhs.xhr.beforeRequest;
4164
4165 if (beforeRequest && typeof beforeRequest === 'function') {
4166 var newOptions = beforeRequest(options);
4167
4168 if (newOptions) {
4169 options = newOptions;
4170 }
} // Use the standard videojs.xhr() method unless `videojs.Vhs.xhr` has been overridden
4172 // TODO: switch back to videojs.Vhs.xhr.name === 'XhrFunction' when we drop IE11
4173
4174
4175 var xhrMethod = videojs__default['default'].Vhs.xhr.original === true ? videojsXHR : videojs__default['default'].Vhs.xhr;
4176 var request = xhrMethod(options, function (error, response) {
4177 return callbackWrapper(request, error, response, callback);
4178 });
4179 var originalAbort = request.abort;
4180
4181 request.abort = function () {
4182 request.aborted = true;
4183 return originalAbort.apply(request, arguments);
4184 };
4185
4186 request.uri = options.uri;
4187 request.requestTime = Date.now();
4188 return request;
4189 };
4190
4191 xhr.original = true;
4192 return xhr;
4193 };
4194 /**
4195 * Turns segment byterange into a string suitable for use in
4196 * HTTP Range requests
4197 *
4198 * @param {Object} byterange - an object with two values defining the start and end
4199 * of a byte-range
4200 */
4201
4202
4203 var byterangeStr = function byterangeStr(byterange) {
4204 // `byterangeEnd` is one less than `offset + length` because the HTTP range
4205 // header uses inclusive ranges
4206 var byterangeEnd = byterange.offset + byterange.length - 1;
4207 var byterangeStart = byterange.offset;
4208 return 'bytes=' + byterangeStart + '-' + byterangeEnd;
4209 };
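// A sketch of the inclusive range math (hypothetical byterange): a
// 1024 byte segment starting at offset 0 covers bytes 0 through 1023.
var exampleByterangeStr = function exampleByterangeStr() {
  return byterangeStr({ offset: 0, length: 1024 }); // => 'bytes=0-1023'
};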
4210 /**
4211 * Defines headers for use in the xhr request for a particular segment.
4212 *
4213 * @param {Object} segment - a simplified copy of the segmentInfo object
4214 * from SegmentLoader
4215 */
4216
4217
4218 var segmentXhrHeaders = function segmentXhrHeaders(segment) {
4219 var headers = {};
4220
4221 if (segment.byterange) {
4222 headers.Range = byterangeStr(segment.byterange);
4223 }
4224
4225 return headers;
4226 };
4227
4228 var MPEGURL_REGEX = /^(audio|video|application)\/(x-|vnd\.apple\.)?mpegurl/i;
4229 var DASH_REGEX = /^application\/dash\+xml/i;
4230 /**
4231 * Returns a string that describes the type of source based on a video source object's
4232 * media type.
4233 *
4234 * @see {@link https://dev.w3.org/html5/pf-summary/video.html#dom-source-type|Source Type}
4235 *
4236 * @param {string} type
4237 * Video source object media type
4238 * @return {('hls'|'dash'|'vhs-json'|null)}
4239 * VHS source type string
4240 */
4241
4242 var simpleTypeFromSourceType = function simpleTypeFromSourceType(type) {
4243 if (MPEGURL_REGEX.test(type)) {
4244 return 'hls';
4245 }
4246
4247 if (DASH_REGEX.test(type)) {
4248 return 'dash';
4249 } // Denotes the special case of a manifest object passed to http-streaming instead of a
4250 // source URL.
4251 //
4252 // See https://en.wikipedia.org/wiki/Media_type for details on specifying media types.
4253 //
4254 // In this case, vnd stands for vendor, video.js for the organization, VHS for this
4255 // project, and the +json suffix identifies the structure of the media type.
4256
4257
4258 if (type === 'application/vnd.videojs.vhs+json') {
4259 return 'vhs-json';
4260 }
4261
4262 return null;
4263 };
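// A sketch of the media type mapping (hypothetical inputs):
var exampleSourceTypes = function exampleSourceTypes() {
  return [
    simpleTypeFromSourceType('application/x-mpegURL'), // => 'hls'
    simpleTypeFromSourceType('application/dash+xml'), // => 'dash'
    simpleTypeFromSourceType('application/vnd.videojs.vhs+json'), // => 'vhs-json'
    simpleTypeFromSourceType('video/mp4') // => null
  ];
};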
4264
4265 /**
4266 * @file bin-utils.js
4267 */
4268
4269 /**
4270 * convert a TimeRange to text
4271 *
4272 * @param {TimeRange} range the timerange to use for conversion
4273 * @param {number} i the iterator on the range to convert
4274 * @return {string} the range in string format
4275 */
4276 var textRange = function textRange(range, i) {
4277 return range.start(i) + '-' + range.end(i);
4278 };
4279 /**
4280 * format a number as hex string
4281 *
4282 * @param {number} e The number
4283 * @param {number} i the iterator
4284 * @return {string} the hex formatted number as a string
4285 */
4286
4287
4288 var formatHexString = function formatHexString(e, i) {
4289 var value = e.toString(16);
4290 return '00'.substring(0, 2 - value.length) + value + (i % 2 ? ' ' : '');
4291 };
4292
4293 var formatAsciiString = function formatAsciiString(e) {
4294 if (e >= 0x20 && e < 0x7e) {
4295 return String.fromCharCode(e);
4296 }
4297
4298 return '.';
4299 };
4300 /**
4301 * Creates an object for sending to a web worker modifying properties that are TypedArrays
 * into a new object with separate properties for the buffer, byteOffset, and byteLength.
4303 *
4304 * @param {Object} message
4305 * Object of properties and values to send to the web worker
4306 * @return {Object}
4307 * Modified message with TypedArray values expanded
4308 * @function createTransferableMessage
4309 */
4310
4311
4312 var createTransferableMessage = function createTransferableMessage(message) {
4313 var transferable = {};
4314 Object.keys(message).forEach(function (key) {
4315 var value = message[key];
4316
4317 if (ArrayBuffer.isView(value)) {
4318 transferable[key] = {
4319 bytes: value.buffer,
4320 byteOffset: value.byteOffset,
4321 byteLength: value.byteLength
4322 };
4323 } else {
4324 transferable[key] = value;
4325 }
4326 });
4327 return transferable;
4328 };
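// A sketch of the transferable expansion (hypothetical message): the
// TypedArray value is replaced by its buffer, offset, and length so the
// underlying ArrayBuffer can be listed in postMessage's transfer list.
var exampleTransferableMessage = function exampleTransferableMessage() {
  var data = new Uint8Array([0, 1, 2, 3]);
  // => { action: 'push', data: { bytes: <ArrayBuffer>, byteOffset: 0, byteLength: 4 } }
  return createTransferableMessage({ action: 'push', data: data });
};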
4329 /**
4330 * Returns a unique string identifier for a media initialization
4331 * segment.
4332 *
4333 * @param {Object} initSegment
4334 * the init segment object.
4335 *
4336 * @return {string} the generated init segment id
4337 */
4338
4339 var initSegmentId = function initSegmentId(initSegment) {
4340 var byterange = initSegment.byterange || {
4341 length: Infinity,
4342 offset: 0
4343 };
4344 return [byterange.length, byterange.offset, initSegment.resolvedUri].join(',');
4345 };
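// A sketch of the id format (hypothetical uri): '<length>,<offset>,<resolvedUri>'.
var exampleInitSegmentId = function exampleInitSegmentId() {
  return initSegmentId({
    byterange: { length: 700, offset: 0 },
    resolvedUri: 'https://example.com/init.mp4'
  }); // => '700,0,https://example.com/init.mp4'
};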
4346 /**
4347 * Returns a unique string identifier for a media segment key.
4348 *
4349 * @param {Object} key the encryption key
4350 * @return {string} the unique id for the media segment key.
4351 */
4352
4353 var segmentKeyId = function segmentKeyId(key) {
4354 return key.resolvedUri;
4355 };
4356 /**
4357 * utils to help dump binary data to the console
4358 *
4359 * @param {Array|TypedArray} data
4360 * data to dump to a string
4361 *
4362 * @return {string} the data as a hex string.
4363 */
4364
4365 var hexDump = function hexDump(data) {
4366 var bytes = Array.prototype.slice.call(data);
4367 var step = 16;
4368 var result = '';
4369 var hex;
4370 var ascii;
4371
4372 for (var j = 0; j < bytes.length / step; j++) {
4373 hex = bytes.slice(j * step, j * step + step).map(formatHexString).join('');
4374 ascii = bytes.slice(j * step, j * step + step).map(formatAsciiString).join('');
4375 result += hex + ' ' + ascii + '\n';
4376 }
4377
4378 return result;
4379 };
4380 var tagDump = function tagDump(_ref) {
4381 var bytes = _ref.bytes;
4382 return hexDump(bytes);
4383 };
4384 var textRanges = function textRanges(ranges) {
4385 var result = '';
4386 var i;
4387
4388 for (i = 0; i < ranges.length; i++) {
4389 result += textRange(ranges, i) + ' ';
4390 }
4391
4392 return result;
4393 };
4394
4395 var utils$1 = /*#__PURE__*/Object.freeze({
4396 __proto__: null,
4397 createTransferableMessage: createTransferableMessage,
4398 initSegmentId: initSegmentId,
4399 segmentKeyId: segmentKeyId,
4400 hexDump: hexDump,
4401 tagDump: tagDump,
4402 textRanges: textRanges
4403 });
4404
4405 // TODO handle fmp4 case where the timing info is accurate and doesn't involve transmux
4406 // 25% was arbitrarily chosen, and may need to be refined over time.
4407
4408 var SEGMENT_END_FUDGE_PERCENT = 0.25;
4409 /**
4410 * Converts a player time (any time that can be gotten/set from player.currentTime(),
4411 * e.g., any time within player.seekable().start(0) to player.seekable().end(0)) to a
4412 * program time (any time referencing the real world (e.g., EXT-X-PROGRAM-DATE-TIME)).
4413 *
4414 * The containing segment is required as the EXT-X-PROGRAM-DATE-TIME serves as an "anchor
4415 * point" (a point where we have a mapping from program time to player time, with player
4416 * time being the post transmux start of the segment).
4417 *
4418 * For more details, see [this doc](../../docs/program-time-from-player-time.md).
4419 *
4420 * @param {number} playerTime the player time
4421 * @param {Object} segment the segment which contains the player time
4422 * @return {Date} program time
4423 */
4424
4425 var playerTimeToProgramTime = function playerTimeToProgramTime(playerTime, segment) {
4426 if (!segment.dateTimeObject) {
4427 // Can't convert without an "anchor point" for the program time (i.e., a time that can
4428 // be used to map the start of a segment with a real world time).
4429 return null;
4430 }
4431
4432 var transmuxerPrependedSeconds = segment.videoTimingInfo.transmuxerPrependedSeconds;
4433 var transmuxedStart = segment.videoTimingInfo.transmuxedPresentationStart; // get the start of the content from before old content is prepended
4434
4435 var startOfSegment = transmuxedStart + transmuxerPrependedSeconds;
4436 var offsetFromSegmentStart = playerTime - startOfSegment;
4437 return new Date(segment.dateTimeObject.getTime() + offsetFromSegmentStart * 1000);
4438 };
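// A worked sketch with hypothetical timing values: the anchor point is
// player time 22 (post-transmux start 20 plus 2 prepended seconds), so
// player time 25 maps to three seconds past the segment's PDT tag.
var examplePlayerTimeToProgramTime = function examplePlayerTimeToProgramTime() {
  var segment = {
    dateTimeObject: new Date('2021-01-01T00:00:00.000Z'),
    videoTimingInfo: {
      transmuxerPrependedSeconds: 2,
      transmuxedPresentationStart: 20
    }
  };
  return playerTimeToProgramTime(25, segment); // => 2021-01-01T00:00:03.000Z
};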
4439 var originalSegmentVideoDuration = function originalSegmentVideoDuration(videoTimingInfo) {
4440 return videoTimingInfo.transmuxedPresentationEnd - videoTimingInfo.transmuxedPresentationStart - videoTimingInfo.transmuxerPrependedSeconds;
4441 };
4442 /**
4443 * Finds a segment that contains the time requested given as an ISO-8601 string. The
4444 * returned segment might be an estimate or an accurate match.
4445 *
4446 * @param {string} programTime The ISO-8601 programTime to find a match for
4447 * @param {Object} playlist A playlist object to search within
4448 */
4449
4450 var findSegmentForProgramTime = function findSegmentForProgramTime(programTime, playlist) {
4451 // Assumptions:
4452 // - verifyProgramDateTimeTags has already been run
4453 // - live streams have been started
4454 var dateTimeObject;
4455
4456 try {
4457 dateTimeObject = new Date(programTime);
4458 } catch (e) {
4459 return null;
4460 }
4461
4462 if (!playlist || !playlist.segments || playlist.segments.length === 0) {
4463 return null;
4464 }
4465
4466 var segment = playlist.segments[0];
4467
4468 if (dateTimeObject < segment.dateTimeObject) {
4469 // Requested time is before stream start.
4470 return null;
4471 }
4472
4473 for (var i = 0; i < playlist.segments.length - 1; i++) {
4474 segment = playlist.segments[i];
4475 var nextSegmentStart = playlist.segments[i + 1].dateTimeObject;
4476
4477 if (dateTimeObject < nextSegmentStart) {
4478 break;
4479 }
4480 }
4481
4482 var lastSegment = playlist.segments[playlist.segments.length - 1];
4483 var lastSegmentStart = lastSegment.dateTimeObject;
4484 var lastSegmentDuration = lastSegment.videoTimingInfo ? originalSegmentVideoDuration(lastSegment.videoTimingInfo) : lastSegment.duration + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT;
4485 var lastSegmentEnd = new Date(lastSegmentStart.getTime() + lastSegmentDuration * 1000);
4486
4487 if (dateTimeObject > lastSegmentEnd) {
4488 // Beyond the end of the stream, or our best guess of the end of the stream.
4489 return null;
4490 }
4491
4492 if (dateTimeObject > lastSegmentStart) {
4493 segment = lastSegment;
4494 }
4495
4496 return {
4497 segment: segment,
4498 estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : Playlist.duration(playlist, playlist.mediaSequence + playlist.segments.indexOf(segment)),
4499 // Although, given that all segments have accurate date time objects, the segment
4500 // selected should be accurate, unless the video has been transmuxed at some point
4501 // (determined by the presence of the videoTimingInfo object), the segment's "player
4502 // time" (the start time in the player) can't be considered accurate.
4503 type: segment.videoTimingInfo ? 'accurate' : 'estimate'
4504 };
4505 };
4506 /**
 * Finds a segment that contains the given player time (in seconds).
4508 *
4509 * @param {number} time The player time to find a match for
4510 * @param {Object} playlist A playlist object to search within
4511 */
4512
4513 var findSegmentForPlayerTime = function findSegmentForPlayerTime(time, playlist) {
4514 // Assumptions:
4515 // - there will always be a segment.duration
4516 // - we can start from zero
4517 // - segments are in time order
4518 if (!playlist || !playlist.segments || playlist.segments.length === 0) {
4519 return null;
4520 }
4521
4522 var segmentEnd = 0;
4523 var segment;
4524
4525 for (var i = 0; i < playlist.segments.length; i++) {
4526 segment = playlist.segments[i]; // videoTimingInfo is set after the segment is downloaded and transmuxed, and
4527 // should contain the most accurate values we have for the segment's player times.
4528 //
4529 // Use the accurate transmuxedPresentationEnd value if it is available, otherwise fall
4530 // back to an estimate based on the manifest derived (inaccurate) segment.duration, to
4531 // calculate an end value.
4532
4533 segmentEnd = segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationEnd : segmentEnd + segment.duration;
4534
4535 if (time <= segmentEnd) {
4536 break;
4537 }
4538 }
4539
4540 var lastSegment = playlist.segments[playlist.segments.length - 1];
4541
4542 if (lastSegment.videoTimingInfo && lastSegment.videoTimingInfo.transmuxedPresentationEnd < time) {
4543 // The time requested is beyond the stream end.
4544 return null;
4545 }
4546
4547 if (time > segmentEnd) {
4548 // The time is within or beyond the last segment.
4549 //
4550 // Check to see if the time is beyond a reasonable guess of the end of the stream.
4551 if (time > segmentEnd + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT) {
4552 // Technically, because the duration value is only an estimate, the time may still
// exist in the last segment; however, there isn't enough information to make even
4554 // a reasonable estimate.
4555 return null;
4556 }
4557
4558 segment = lastSegment;
4559 }
4560
4561 return {
4562 segment: segment,
4563 estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : segmentEnd - segment.duration,
4564 // Because videoTimingInfo is only set after transmux, it is the only way to get
4565 // accurate timing values.
4566 type: segment.videoTimingInfo ? 'accurate' : 'estimate'
4567 };
4568 };
4569 /**
 * Gives the offset, in seconds, of the programTime from the comparisonTimeStamp.
4571 * If the offset returned is positive, the programTime occurs after the
4572 * comparisonTimestamp.
4573 * If the offset is negative, the programTime occurs before the comparisonTimestamp.
4574 *
4575 * @param {string} comparisonTimeStamp An ISO-8601 timestamp to compare against
4576 * @param {string} programTime The programTime as an ISO-8601 string
4577 * @return {number} offset
4578 */
4579
4580 var getOffsetFromTimestamp = function getOffsetFromTimestamp(comparisonTimeStamp, programTime) {
4581 var segmentDateTime;
4582 var programDateTime;
4583
4584 try {
4585 segmentDateTime = new Date(comparisonTimeStamp);
4586 programDateTime = new Date(programTime);
} catch (e) {
  // TODO handle error
}
4589
4590 var segmentTimeEpoch = segmentDateTime.getTime();
4591 var programTimeEpoch = programDateTime.getTime();
4592 return (programTimeEpoch - segmentTimeEpoch) / 1000;
4593 };
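// A sketch of the sign convention (hypothetical timestamps): a
// programTime 15 seconds after the comparison timestamp yields +15.
var exampleOffsetFromTimestamp = function exampleOffsetFromTimestamp() {
  return getOffsetFromTimestamp('2021-01-01T00:00:10.000Z', '2021-01-01T00:00:25.000Z'); // => 15
};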
4594 /**
4595 * Checks that all segments in this playlist have programDateTime tags.
4596 *
4597 * @param {Object} playlist A playlist object
4598 */
4599
4600 var verifyProgramDateTimeTags = function verifyProgramDateTimeTags(playlist) {
4601 if (!playlist.segments || playlist.segments.length === 0) {
4602 return false;
4603 }
4604
4605 for (var i = 0; i < playlist.segments.length; i++) {
4606 var segment = playlist.segments[i];
4607
4608 if (!segment.dateTimeObject) {
4609 return false;
4610 }
4611 }
4612
4613 return true;
4614 };
4615 /**
4616 * Returns the programTime of the media given a playlist and a playerTime.
4617 * The playlist must have programDateTime tags for a programDateTime tag to be returned.
4618 * If the segments containing the time requested have not been buffered yet, an estimate
4619 * may be returned to the callback.
4620 *
4621 * @param {Object} args
4622 * @param {Object} args.playlist A playlist object to search within
4623 * @param {number} time A playerTime in seconds
4624 * @param {Function} callback(err, programTime)
4625 * @return {string} err.message A detailed error message
4626 * @return {Object} programTime
4627 * @return {number} programTime.mediaSeconds The streamTime in seconds
4628 * @return {string} programTime.programDateTime The programTime as an ISO-8601 String
4629 */
4630
4631 var getProgramTime = function getProgramTime(_ref) {
4632 var playlist = _ref.playlist,
4633 _ref$time = _ref.time,
4634 time = _ref$time === void 0 ? undefined : _ref$time,
4635 callback = _ref.callback;
4636
4637 if (!callback) {
4638 throw new Error('getProgramTime: callback must be provided');
4639 }
4640
4641 if (!playlist || time === undefined) {
4642 return callback({
4643 message: 'getProgramTime: playlist and time must be provided'
4644 });
4645 }
4646
4647 var matchedSegment = findSegmentForPlayerTime(time, playlist);
4648
4649 if (!matchedSegment) {
4650 return callback({
4651 message: 'valid programTime was not found'
4652 });
4653 }
4654
4655 if (matchedSegment.type === 'estimate') {
4656 return callback({
4657 message: 'Accurate programTime could not be determined.' + ' Please seek to e.seekTime and try again',
4658 seekTime: matchedSegment.estimatedStart
4659 });
4660 }
4661
4662 var programTimeObject = {
4663 mediaSeconds: time
4664 };
4665 var programTime = playerTimeToProgramTime(time, matchedSegment.segment);
4666
4667 if (programTime) {
4668 programTimeObject.programDateTime = programTime.toISOString();
4669 }
4670
4671 return callback(null, programTimeObject);
4672 };
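// A usage sketch (hypothetical playlist argument, not invoked by the
// library): callers receive either an error object, possibly carrying a
// seekTime hint, or a programTime object.
var exampleGetProgramTime = function exampleGetProgramTime(playlist) {
  getProgramTime({
    playlist: playlist,
    time: 5,
    callback: function (err, programTime) {
      if (err) {
        // err.seekTime, when present, is a position to seek to before retrying
        return;
      }
      // programTime.mediaSeconds is always set; programTime.programDateTime
      // (an ISO-8601 string) is set when PDT tags are available
    }
  });
};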
4673 /**
4674 * Seeks in the player to a time that matches the given programTime ISO-8601 string.
4675 *
4676 * @param {Object} args
4677 * @param {string} args.programTime A programTime to seek to as an ISO-8601 String
4678 * @param {Object} args.playlist A playlist to look within
4679 * @param {number} args.retryCount The number of times to try for an accurate seek. Default is 2.
4680 * @param {Function} args.seekTo A method to perform a seek
4681 * @param {boolean} args.pauseAfterSeek Whether to end in a paused state after seeking. Default is true.
4682 * @param {Object} args.tech The tech to seek on
4683 * @param {Function} args.callback(err, newTime) A callback to return the new time to
4684 * @return {string} err.message A detailed error message
4685 * @return {number} newTime The exact time that was seeked to in seconds
4686 */
4687
4688 var seekToProgramTime = function seekToProgramTime(_ref2) {
4689 var programTime = _ref2.programTime,
4690 playlist = _ref2.playlist,
4691 _ref2$retryCount = _ref2.retryCount,
4692 retryCount = _ref2$retryCount === void 0 ? 2 : _ref2$retryCount,
4693 seekTo = _ref2.seekTo,
4694 _ref2$pauseAfterSeek = _ref2.pauseAfterSeek,
4695 pauseAfterSeek = _ref2$pauseAfterSeek === void 0 ? true : _ref2$pauseAfterSeek,
4696 tech = _ref2.tech,
4697 callback = _ref2.callback;
4698
4699 if (!callback) {
4700 throw new Error('seekToProgramTime: callback must be provided');
4701 }
4702
4703 if (typeof programTime === 'undefined' || !playlist || !seekTo) {
4704 return callback({
4705 message: 'seekToProgramTime: programTime, seekTo and playlist must be provided'
4706 });
4707 }
4708
4709 if (!playlist.endList && !tech.hasStarted_) {
4710 return callback({
4711 message: 'player must be playing a live stream to start buffering'
4712 });
4713 }
4714
4715 if (!verifyProgramDateTimeTags(playlist)) {
4716 return callback({
4717 message: 'programDateTime tags must be provided in the manifest ' + playlist.resolvedUri
4718 });
4719 }
4720
4721 var matchedSegment = findSegmentForProgramTime(programTime, playlist); // no match
4722
4723 if (!matchedSegment) {
4724 return callback({
4725 message: programTime + " was not found in the stream"
4726 });
4727 }
4728
4729 var segment = matchedSegment.segment;
4730 var mediaOffset = getOffsetFromTimestamp(segment.dateTimeObject, programTime);
4731
4732 if (matchedSegment.type === 'estimate') {
4733 // we've run out of retries
4734 if (retryCount === 0) {
4735 return callback({
4736 message: programTime + " is not buffered yet. Try again"
4737 });
4738 }
4739
4740 seekTo(matchedSegment.estimatedStart + mediaOffset);
4741 tech.one('seeked', function () {
4742 seekToProgramTime({
4743 programTime: programTime,
4744 playlist: playlist,
4745 retryCount: retryCount - 1,
4746 seekTo: seekTo,
4747 pauseAfterSeek: pauseAfterSeek,
4748 tech: tech,
4749 callback: callback
4750 });
4751 });
4752 return;
4753 } // Since the segment.start value is determined from the buffered end or ending time
4754 // of the prior segment, the seekToTime doesn't need to account for any transmuxer
4755 // modifications.
4756
4757
4758 var seekToTime = segment.start + mediaOffset;
4759
4760 var seekedCallback = function seekedCallback() {
4761 return callback(null, tech.currentTime());
4762 }; // listen for seeked event
4763
4764
4765 tech.one('seeked', seekedCallback); // pause before seeking as video.js will restore this state
4766
4767 if (pauseAfterSeek) {
4768 tech.pause();
4769 }
4770
4771 seekTo(seekToTime);
4772 };
4773
4774 /*! @name mpd-parser @version 0.16.0 @license Apache-2.0 */
4775
4776 var isObject = function isObject(obj) {
4777 return !!obj && typeof obj === 'object';
4778 };
4779
4780 var merge = function merge() {
4781 for (var _len = arguments.length, objects = new Array(_len), _key = 0; _key < _len; _key++) {
4782 objects[_key] = arguments[_key];
4783 }
4784
4785 return objects.reduce(function (result, source) {
4786 if (typeof source !== 'object') {
4787 return result;
4788 }
4789
4790 Object.keys(source).forEach(function (key) {
4791 if (Array.isArray(result[key]) && Array.isArray(source[key])) {
4792 result[key] = result[key].concat(source[key]);
4793 } else if (isObject(result[key]) && isObject(source[key])) {
4794 result[key] = merge(result[key], source[key]);
4795 } else {
4796 result[key] = source[key];
4797 }
4798 });
4799 return result;
4800 }, {});
4801 };
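// A sketch of the deep-merge semantics (hypothetical objects): arrays
// concatenate, nested objects merge recursively, scalars are overwritten.
var exampleMerge = function exampleMerge() {
  return merge(
    { a: [1], nested: { x: 1 }, keep: 'old' },
    { a: [2], nested: { y: 2 }, keep: 'new' }
  ); // => { a: [1, 2], nested: { x: 1, y: 2 }, keep: 'new' }
};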
4802
4803 var values = function values(o) {
4804 return Object.keys(o).map(function (k) {
4805 return o[k];
4806 });
4807 };
4808
4809 var range = function range(start, end) {
4810 var result = [];
4811
4812 for (var i = start; i < end; i++) {
4813 result.push(i);
4814 }
4815
4816 return result;
4817 };
4818
4819 var flatten = function flatten(lists) {
4820 return lists.reduce(function (x, y) {
4821 return x.concat(y);
4822 }, []);
4823 };
4824
4825 var from = function from(list) {
4826 if (!list.length) {
4827 return [];
4828 }
4829
4830 var result = [];
4831
4832 for (var i = 0; i < list.length; i++) {
4833 result.push(list[i]);
4834 }
4835
4836 return result;
4837 };
4838
4839 var findIndexes = function findIndexes(l, key) {
4840 return l.reduce(function (a, e, i) {
4841 if (e[key]) {
4842 a.push(i);
4843 }
4844
4845 return a;
4846 }, []);
4847 };
4848
4849 var errors = {
4850 INVALID_NUMBER_OF_PERIOD: 'INVALID_NUMBER_OF_PERIOD',
4851 DASH_EMPTY_MANIFEST: 'DASH_EMPTY_MANIFEST',
4852 DASH_INVALID_XML: 'DASH_INVALID_XML',
4853 NO_BASE_URL: 'NO_BASE_URL',
4854 MISSING_SEGMENT_INFORMATION: 'MISSING_SEGMENT_INFORMATION',
4855 SEGMENT_TIME_UNSPECIFIED: 'SEGMENT_TIME_UNSPECIFIED',
4856 UNSUPPORTED_UTC_TIMING_SCHEME: 'UNSUPPORTED_UTC_TIMING_SCHEME'
4857 };
4858 /**
4859 * @typedef {Object} SingleUri
4860 * @property {string} uri - relative location of segment
4861 * @property {string} resolvedUri - resolved location of segment
4862 * @property {Object} byterange - Object containing information on how to make byte range
4863 * requests following byte-range-spec per RFC2616.
 * @property {number} byterange.length - length of range request
 * @property {number} byterange.offset - byte offset of range request
4866 *
4867 * @see https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.35.1
4868 */
4869
4870 /**
4871 * Converts a URLType node (5.3.9.2.3 Table 13) to a segment object
4872 * that conforms to how m3u8-parser is structured
4873 *
4874 * @see https://github.com/videojs/m3u8-parser
4875 *
4876 * @param {string} baseUrl - baseUrl provided by <BaseUrl> nodes
4877 * @param {string} source - source url for segment
4878 * @param {string} range - optional range used for range calls,
4879 * follows RFC 2616, Clause 14.35.1
4880 * @return {SingleUri} full segment information transformed into a format similar
4881 * to m3u8-parser
4882 */
4883
4884 var urlTypeToSegment = function urlTypeToSegment(_ref) {
4885 var _ref$baseUrl = _ref.baseUrl,
4886 baseUrl = _ref$baseUrl === void 0 ? '' : _ref$baseUrl,
4887 _ref$source = _ref.source,
4888 source = _ref$source === void 0 ? '' : _ref$source,
4889 _ref$range = _ref.range,
4890 range = _ref$range === void 0 ? '' : _ref$range,
4891 _ref$indexRange = _ref.indexRange,
4892 indexRange = _ref$indexRange === void 0 ? '' : _ref$indexRange;
4893 var segment = {
4894 uri: source,
4895 resolvedUri: resolveUrl$1(baseUrl || '', source)
4896 };
4897
4898 if (range || indexRange) {
4899 var rangeStr = range ? range : indexRange;
4900 var ranges = rangeStr.split('-');
4901 var startRange = parseInt(ranges[0], 10);
4902 var endRange = parseInt(ranges[1], 10); // byterange should be inclusive according to
4903 // RFC 2616, Clause 14.35.1
4904
4905 segment.byterange = {
4906 length: endRange - startRange + 1,
4907 offset: startRange
4908 };
4909 }
4910
4911 return segment;
4912 };
4913
4914 var byteRangeToString = function byteRangeToString(byterange) {
4915 // `endRange` is one less than `offset + length` because the HTTP range
4916 // header uses inclusive ranges
4917 var endRange = byterange.offset + byterange.length - 1;
4918 return byterange.offset + "-" + endRange;
4919 };
4920 /**
 * Parse the endNumber attribute, which may be a string,
 * a number, or undefined.
4923 *
4924 * @param {string|number|undefined} endNumber
4925 * The end number attribute.
4926 *
4927 * @return {number|null}
4928 * The result of parsing the end number.
4929 */
4930
4931
4932 var parseEndNumber = function parseEndNumber(endNumber) {
4933 if (endNumber && typeof endNumber !== 'number') {
4934 endNumber = parseInt(endNumber, 10);
4935 }
4936
4937 if (isNaN(endNumber)) {
4938 return null;
4939 }
4940
4941 return endNumber;
4942 };
4943 /**
4944 * Functions for calculating the range of available segments in static and dynamic
4945 * manifests.
4946 */
4947
4948
4949 var segmentRange = {
4950 /**
4951 * Returns the entire range of available segments for a static MPD
4952 *
4953 * @param {Object} attributes
 * Inherited MPD attributes
4955 * @return {{ start: number, end: number }}
4956 * The start and end numbers for available segments
4957 */
4958 static: function _static(attributes) {
4959 var duration = attributes.duration,
4960 _attributes$timescale = attributes.timescale,
4961 timescale = _attributes$timescale === void 0 ? 1 : _attributes$timescale,
4962 sourceDuration = attributes.sourceDuration;
4963 var endNumber = parseEndNumber(attributes.endNumber);
4964 return {
4965 start: 0,
4966 end: typeof endNumber === 'number' ? endNumber : Math.ceil(sourceDuration / (duration / timescale))
4967 };
4968 },
4969
4970 /**
4971 * Returns the current live window range of available segments for a dynamic MPD
4972 *
4973 * @param {Object} attributes
 * Inherited MPD attributes
4975 * @return {{ start: number, end: number }}
4976 * The start and end numbers for available segments
4977 */
4978 dynamic: function dynamic(attributes) {
4979 var NOW = attributes.NOW,
4980 clientOffset = attributes.clientOffset,
4981 availabilityStartTime = attributes.availabilityStartTime,
4982 _attributes$timescale2 = attributes.timescale,
4983 timescale = _attributes$timescale2 === void 0 ? 1 : _attributes$timescale2,
4984 duration = attributes.duration,
4985 _attributes$start = attributes.start,
4986 start = _attributes$start === void 0 ? 0 : _attributes$start,
4987 _attributes$minimumUp = attributes.minimumUpdatePeriod,
4988 minimumUpdatePeriod = _attributes$minimumUp === void 0 ? 0 : _attributes$minimumUp,
4989 _attributes$timeShift = attributes.timeShiftBufferDepth,
4990 timeShiftBufferDepth = _attributes$timeShift === void 0 ? Infinity : _attributes$timeShift;
4991 var endNumber = parseEndNumber(attributes.endNumber);
4992 var now = (NOW + clientOffset) / 1000;
4993 var periodStartWC = availabilityStartTime + start;
4994 var periodEndWC = now + minimumUpdatePeriod;
4995 var periodDuration = periodEndWC - periodStartWC;
4996 var segmentCount = Math.ceil(periodDuration * timescale / duration);
4997 var availableStart = Math.floor((now - periodStartWC - timeShiftBufferDepth) * timescale / duration);
4998 var availableEnd = Math.floor((now - periodStartWC) * timescale / duration);
4999 return {
5000 start: Math.max(0, availableStart),
5001 end: typeof endNumber === 'number' ? endNumber : Math.min(segmentCount, availableEnd)
5002 };
5003 }
5004 };
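// A sketch of the static case (hypothetical attributes): a 100 second
// presentation cut into 10 second segments yields { start: 0, end: 10 },
// which range() later expands into segment numbers 0 through 9.
var exampleStaticSegmentRange = function exampleStaticSegmentRange() {
  return segmentRange['static']({
    duration: 10,
    timescale: 1,
    sourceDuration: 100
  });
};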
5005 /**
5006 * Maps a range of numbers to objects with information needed to build the corresponding
5007 * segment list
5008 *
5009 * @name toSegmentsCallback
5010 * @function
5011 * @param {number} number
5012 * Number of the segment
5013 * @param {number} index
5014 * Index of the number in the range list
5015 * @return {{ number: Number, duration: Number, timeline: Number, time: Number }}
5016 * Object with segment timing and duration info
5017 */
5018
5019 /**
5020 * Returns a callback for Array.prototype.map for mapping a range of numbers to
5021 * information needed to build the segment list.
5022 *
5023 * @param {Object} attributes
5024 * Inherited MPD attributes
5025 * @return {toSegmentsCallback}
5026 * Callback map function
5027 */
5028
5029 var toSegments = function toSegments(attributes) {
5030 return function (number, index) {
5031 var duration = attributes.duration,
5032 _attributes$timescale3 = attributes.timescale,
5033 timescale = _attributes$timescale3 === void 0 ? 1 : _attributes$timescale3,
5034 periodIndex = attributes.periodIndex,
5035 _attributes$startNumb = attributes.startNumber,
5036 startNumber = _attributes$startNumb === void 0 ? 1 : _attributes$startNumb;
5037 return {
5038 number: startNumber + number,
5039 duration: duration / timescale,
5040 timeline: periodIndex,
5041 time: index * duration
5042 };
5043 };
5044 };
5045 /**
5046 * Returns a list of objects containing segment timing and duration info used for
5047 * building the list of segments. This uses the @duration attribute specified
5048 * in the MPD manifest to derive the range of segments.
5049 *
5050 * @param {Object} attributes
5051 * Inherited MPD attributes
5052 * @return {{number: number, duration: number, time: number, timeline: number}[]}
5053 * List of Objects with segment timing and duration info
5054 */
5055
5056
5057 var parseByDuration = function parseByDuration(attributes) {
5058 var _attributes$type = attributes.type,
5059 type = _attributes$type === void 0 ? 'static' : _attributes$type,
5060 duration = attributes.duration,
5061 _attributes$timescale4 = attributes.timescale,
5062 timescale = _attributes$timescale4 === void 0 ? 1 : _attributes$timescale4,
5063 sourceDuration = attributes.sourceDuration;
5064
5065 var _segmentRange$type = segmentRange[type](attributes),
5066 start = _segmentRange$type.start,
5067 end = _segmentRange$type.end;
5068
5069 var segments = range(start, end).map(toSegments(attributes));
5070
5071 if (type === 'static') {
5072 var index = segments.length - 1; // final segment may be less than full segment duration
5073
5074 segments[index].duration = sourceDuration - duration / timescale * index;
5075 }
5076
5077 return segments;
5078 };
5079 /**
 * Translates SegmentBase (DASH SPEC Section 5.3.9.2) into a set of
 * segments. SegmentBase describes a single media URL, so one segment
 * is produced, with its byte range taken from indexRange when present.
5083 *
5084 * @param {Object} attributes
5085 * Object containing all inherited attributes from parent elements with attribute
5086 * names as keys
5087 * @return {Object.<Array>} list of segments
5088 */
5089
5090
5091 var segmentsFromBase = function segmentsFromBase(attributes) {
5092 var baseUrl = attributes.baseUrl,
5093 _attributes$initializ = attributes.initialization,
5094 initialization = _attributes$initializ === void 0 ? {} : _attributes$initializ,
5095 sourceDuration = attributes.sourceDuration,
5096 _attributes$indexRang = attributes.indexRange,
5097 indexRange = _attributes$indexRang === void 0 ? '' : _attributes$indexRang,
5098 duration = attributes.duration; // base url is required for SegmentBase to work, per spec (Section 5.3.9.2.1)
5099
5100 if (!baseUrl) {
5101 throw new Error(errors.NO_BASE_URL);
5102 }
5103
5104 var initSegment = urlTypeToSegment({
5105 baseUrl: baseUrl,
5106 source: initialization.sourceURL,
5107 range: initialization.range
5108 });
5109 var segment = urlTypeToSegment({
5110 baseUrl: baseUrl,
5111 source: baseUrl,
5112 indexRange: indexRange
5113 });
5114 segment.map = initSegment; // If there is a duration, use it, otherwise use the given duration of the source
5115 // (since SegmentBase is only for one total segment)
5116
5117 if (duration) {
5118 var segmentTimeInfo = parseByDuration(attributes);
5119
5120 if (segmentTimeInfo.length) {
5121 segment.duration = segmentTimeInfo[0].duration;
5122 segment.timeline = segmentTimeInfo[0].timeline;
5123 }
5124 } else if (sourceDuration) {
5125 segment.duration = sourceDuration;
5126 segment.timeline = 0;
5127 } // This is used for mediaSequence
5128
5129
5130 segment.number = 0;
5131 return [segment];
5132 };
5133 /**
5134 * Given a playlist, a sidx box, and a baseUrl, update the segment list of the playlist
5135 * according to the sidx information given.
5136 *
 * playlist.sidx has metadata about the sidx, whereas the sidx param
 * is the parsed sidx box itself.
5139 *
5140 * @param {Object} playlist the playlist to update the sidx information for
 * @param {Object} sidx the parsed sidx box
 * @param {string} baseUrl the base url segment uris are resolved against
 * @return {Object} the playlist object with the updated sidx information
5143 */
5144
5145
5146 var addSidxSegmentsToPlaylist = function addSidxSegmentsToPlaylist(playlist, sidx, baseUrl) {
5147 // Retain init segment information
5148 var initSegment = playlist.sidx.map ? playlist.sidx.map : null; // Retain source duration from initial master manifest parsing
5149
5150 var sourceDuration = playlist.sidx.duration; // Retain source timeline
5151
5152 var timeline = playlist.timeline || 0;
5153 var sidxByteRange = playlist.sidx.byterange;
5154 var sidxEnd = sidxByteRange.offset + sidxByteRange.length; // Retain timescale of the parsed sidx
5155
5156 var timescale = sidx.timescale; // referenceType 1 refers to other sidx boxes
5157
5158 var mediaReferences = sidx.references.filter(function (r) {
5159 return r.referenceType !== 1;
5160 });
5161 var segments = []; // firstOffset is the offset from the end of the sidx box
5162
5163 var startIndex = sidxEnd + sidx.firstOffset;
5164
5165 for (var i = 0; i < mediaReferences.length; i++) {
// index the filtered media references, not sidx.references, so that
// sidx boxes (referenceType 1) don't shift the segment mapping
var reference = mediaReferences[i]; // size of the referenced (sub)segment
5167
5168 var size = reference.referencedSize; // duration of the referenced (sub)segment, in the timescale
5169 // this will be converted to seconds when generating segments
5170
5171 var duration = reference.subsegmentDuration; // should be an inclusive range
5172
5173 var endIndex = startIndex + size - 1;
5174 var indexRange = startIndex + "-" + endIndex;
5175 var attributes = {
5176 baseUrl: baseUrl,
5177 timescale: timescale,
5178 timeline: timeline,
5179 // this is used in parseByDuration
5180 periodIndex: timeline,
5181 duration: duration,
5182 sourceDuration: sourceDuration,
5183 indexRange: indexRange
5184 };
5185 var segment = segmentsFromBase(attributes)[0];
5186
5187 if (initSegment) {
5188 segment.map = initSegment;
5189 }
5190
5191 segments.push(segment);
5192 startIndex += size;
5193 }
5194
5195 playlist.segments = segments;
5196 return playlist;
5197 };
5198
5199 var generateSidxKey = function generateSidxKey(sidx) {
5200 return sidx && sidx.uri + '-' + byteRangeToString(sidx.byterange);
5201 };
5202
5203 var mergeDiscontiguousPlaylists = function mergeDiscontiguousPlaylists(playlists) {
5204 var mergedPlaylists = values(playlists.reduce(function (acc, playlist) {
5205 // assuming playlist IDs are the same across periods
5206 // TODO: handle multiperiod where representation sets are not the same
5207 // across periods
5208 var name = playlist.attributes.id + (playlist.attributes.lang || ''); // Periods after first
5209
5210 if (acc[name]) {
var _acc$name$segments; // the first segment of each subsequent period signals a discontinuity
5212
5213
5214 if (playlist.segments[0]) {
5215 playlist.segments[0].discontinuity = true;
5216 }
5217
5218 (_acc$name$segments = acc[name].segments).push.apply(_acc$name$segments, playlist.segments); // bubble up contentProtection, this assumes all DRM content
5219 // has the same contentProtection
5220
5221
5222 if (playlist.attributes.contentProtection) {
5223 acc[name].attributes.contentProtection = playlist.attributes.contentProtection;
5224 }
5225 } else {
5226 // first Period
5227 acc[name] = playlist;
5228 }
5229
5230 return acc;
5231 }, {}));
5232 return mergedPlaylists.map(function (playlist) {
5233 playlist.discontinuityStarts = findIndexes(playlist.segments, 'discontinuity');
5234 return playlist;
5235 });
5236 };
5237
5238 var addSidxSegmentsToPlaylist$1 = function addSidxSegmentsToPlaylist$1(playlist, sidxMapping) {
5239 var sidxKey = generateSidxKey(playlist.sidx);
5240 var sidxMatch = sidxKey && sidxMapping[sidxKey] && sidxMapping[sidxKey].sidx;
5241
5242 if (sidxMatch) {
5243 addSidxSegmentsToPlaylist(playlist, sidxMatch, playlist.sidx.resolvedUri);
5244 }
5245
5246 return playlist;
5247 };
5248
5249 var addSidxSegmentsToPlaylists = function addSidxSegmentsToPlaylists(playlists, sidxMapping) {
5250 if (sidxMapping === void 0) {
5251 sidxMapping = {};
5252 }
5253
5254 if (!Object.keys(sidxMapping).length) {
5255 return playlists;
5256 }
5257
5258 for (var i in playlists) {
5259 playlists[i] = addSidxSegmentsToPlaylist$1(playlists[i], sidxMapping);
5260 }
5261
5262 return playlists;
5263 };
5264
5265 var formatAudioPlaylist = function formatAudioPlaylist(_ref, isAudioOnly) {
5266 var _attributes;
5267
5268 var attributes = _ref.attributes,
5269 segments = _ref.segments,
5270 sidx = _ref.sidx;
5271 var playlist = {
5272 attributes: (_attributes = {
5273 NAME: attributes.id,
5274 BANDWIDTH: attributes.bandwidth,
5275 CODECS: attributes.codecs
5276 }, _attributes['PROGRAM-ID'] = 1, _attributes),
5277 uri: '',
5278 endList: (attributes.type || 'static') === 'static',
5279 timeline: attributes.periodIndex,
5280 resolvedUri: '',
5281 targetDuration: attributes.duration,
5282 segments: segments,
5283 mediaSequence: segments.length ? segments[0].number : 1
5284 };
5285
5286 if (attributes.contentProtection) {
5287 playlist.contentProtection = attributes.contentProtection;
5288 }
5289
5290 if (sidx) {
5291 playlist.sidx = sidx;
5292 }
5293
5294 if (isAudioOnly) {
5295 playlist.attributes.AUDIO = 'audio';
5296 playlist.attributes.SUBTITLES = 'subs';
5297 }
5298
5299 return playlist;
5300 };
5301
5302 var formatVttPlaylist = function formatVttPlaylist(_ref2) {
5303 var _m3u8Attributes;
5304
5305 var attributes = _ref2.attributes,
5306 segments = _ref2.segments;
5307
5308 if (typeof segments === 'undefined') {
// vtt tracks may use a single file in BaseURL
5310 segments = [{
5311 uri: attributes.baseUrl,
5312 timeline: attributes.periodIndex,
5313 resolvedUri: attributes.baseUrl || '',
5314 duration: attributes.sourceDuration,
5315 number: 0
5316 }]; // targetDuration should be the same duration as the only segment
5317
5318 attributes.duration = attributes.sourceDuration;
5319 }
5320
5321 var m3u8Attributes = (_m3u8Attributes = {
5322 NAME: attributes.id,
5323 BANDWIDTH: attributes.bandwidth
5324 }, _m3u8Attributes['PROGRAM-ID'] = 1, _m3u8Attributes);
5325
5326 if (attributes.codecs) {
5327 m3u8Attributes.CODECS = attributes.codecs;
5328 }
5329
5330 return {
5331 attributes: m3u8Attributes,
5332 uri: '',
5333 endList: (attributes.type || 'static') === 'static',
5334 timeline: attributes.periodIndex,
5335 resolvedUri: attributes.baseUrl || '',
5336 targetDuration: attributes.duration,
5337 segments: segments,
5338 mediaSequence: segments.length ? segments[0].number : 1
5339 };
5340 };
5341
5342 var organizeAudioPlaylists = function organizeAudioPlaylists(playlists, sidxMapping, isAudioOnly) {
5343 if (sidxMapping === void 0) {
5344 sidxMapping = {};
5345 }
5346
5347 if (isAudioOnly === void 0) {
5348 isAudioOnly = false;
5349 }
5350
5351 var mainPlaylist;
5352 var formattedPlaylists = playlists.reduce(function (a, playlist) {
5353 var role = playlist.attributes.role && playlist.attributes.role.value || '';
5354 var language = playlist.attributes.lang || '';
5355 var label = playlist.attributes.label || 'main';
5356
5357 if (language && !playlist.attributes.label) {
5358 var roleLabel = role ? " (" + role + ")" : '';
5359 label = "" + playlist.attributes.lang + roleLabel;
5360 }
5361
5362 if (!a[label]) {
5363 a[label] = {
5364 language: language,
5365 autoselect: true,
5366 default: role === 'main',
5367 playlists: [],
5368 uri: ''
5369 };
5370 }
5371
5372 var formatted = addSidxSegmentsToPlaylist$1(formatAudioPlaylist(playlist, isAudioOnly), sidxMapping);
5373 a[label].playlists.push(formatted);
5374
5375 if (typeof mainPlaylist === 'undefined' && role === 'main') {
5376 mainPlaylist = playlist;
5377 mainPlaylist.default = true;
5378 }
5379
5380 return a;
5381 }, {}); // if no playlists have role "main", mark the first as main
5382
5383 if (!mainPlaylist) {
5384 var firstLabel = Object.keys(formattedPlaylists)[0];
5385 formattedPlaylists[firstLabel].default = true;
5386 }
5387
5388 return formattedPlaylists;
5389 };
5390
5391 var organizeVttPlaylists = function organizeVttPlaylists(playlists, sidxMapping) {
5392 if (sidxMapping === void 0) {
5393 sidxMapping = {};
5394 }
5395
5396 return playlists.reduce(function (a, playlist) {
5397 var label = playlist.attributes.lang || 'text';
5398
5399 if (!a[label]) {
5400 a[label] = {
5401 language: label,
5402 default: false,
5403 autoselect: false,
5404 playlists: [],
5405 uri: ''
5406 };
5407 }
5408
5409 a[label].playlists.push(addSidxSegmentsToPlaylist$1(formatVttPlaylist(playlist), sidxMapping));
5410 return a;
5411 }, {});
5412 };
5413
5414 var formatVideoPlaylist = function formatVideoPlaylist(_ref3) {
5415 var _attributes2;
5416
5417 var attributes = _ref3.attributes,
5418 segments = _ref3.segments,
5419 sidx = _ref3.sidx;
5420 var playlist = {
5421 attributes: (_attributes2 = {
5422 NAME: attributes.id,
5423 AUDIO: 'audio',
5424 SUBTITLES: 'subs',
5425 RESOLUTION: {
5426 width: attributes.width,
5427 height: attributes.height
5428 },
5429 CODECS: attributes.codecs,
5430 BANDWIDTH: attributes.bandwidth
5431 }, _attributes2['PROGRAM-ID'] = 1, _attributes2),
5432 uri: '',
5433 endList: (attributes.type || 'static') === 'static',
5434 timeline: attributes.periodIndex,
5435 resolvedUri: '',
5436 targetDuration: attributes.duration,
5437 segments: segments,
5438 mediaSequence: segments.length ? segments[0].number : 1
5439 };
5440
5441 if (attributes.contentProtection) {
5442 playlist.contentProtection = attributes.contentProtection;
5443 }
5444
5445 if (sidx) {
5446 playlist.sidx = sidx;
5447 }
5448
5449 return playlist;
5450 };
5451
5452 var videoOnly = function videoOnly(_ref4) {
5453 var attributes = _ref4.attributes;
5454 return attributes.mimeType === 'video/mp4' || attributes.mimeType === 'video/webm' || attributes.contentType === 'video';
5455 };
5456
5457 var audioOnly = function audioOnly(_ref5) {
5458 var attributes = _ref5.attributes;
5459 return attributes.mimeType === 'audio/mp4' || attributes.mimeType === 'audio/webm' || attributes.contentType === 'audio';
5460 };
5461
5462 var vttOnly = function vttOnly(_ref6) {
5463 var attributes = _ref6.attributes;
5464 return attributes.mimeType === 'text/vtt' || attributes.contentType === 'text';
5465 };
5466
5467 var toM3u8 = function toM3u8(dashPlaylists, locations, sidxMapping) {
5468 var _mediaGroups;
5469
5470 if (sidxMapping === void 0) {
5471 sidxMapping = {};
5472 }
5473
5474 if (!dashPlaylists.length) {
5475 return {};
5476 } // grab all master attributes
5477
5478
5479 var _dashPlaylists$0$attr = dashPlaylists[0].attributes,
5480 duration = _dashPlaylists$0$attr.sourceDuration,
5481 _dashPlaylists$0$attr2 = _dashPlaylists$0$attr.type,
5482 type = _dashPlaylists$0$attr2 === void 0 ? 'static' : _dashPlaylists$0$attr2,
5483 suggestedPresentationDelay = _dashPlaylists$0$attr.suggestedPresentationDelay,
5484 minimumUpdatePeriod = _dashPlaylists$0$attr.minimumUpdatePeriod;
5485 var videoPlaylists = mergeDiscontiguousPlaylists(dashPlaylists.filter(videoOnly)).map(formatVideoPlaylist);
5486 var audioPlaylists = mergeDiscontiguousPlaylists(dashPlaylists.filter(audioOnly));
5487 var vttPlaylists = dashPlaylists.filter(vttOnly);
5488 var master = {
5489 allowCache: true,
5490 discontinuityStarts: [],
5491 segments: [],
5492 endList: true,
5493 mediaGroups: (_mediaGroups = {
5494 AUDIO: {},
5495 VIDEO: {}
5496 }, _mediaGroups['CLOSED-CAPTIONS'] = {}, _mediaGroups.SUBTITLES = {}, _mediaGroups),
5497 uri: '',
5498 duration: duration,
5499 playlists: addSidxSegmentsToPlaylists(videoPlaylists, sidxMapping)
5500 };
5501
5502 if (minimumUpdatePeriod >= 0) {
5503 master.minimumUpdatePeriod = minimumUpdatePeriod * 1000;
5504 }
5505
5506 if (locations) {
5507 master.locations = locations;
5508 }
5509
5510 if (type === 'dynamic') {
5511 master.suggestedPresentationDelay = suggestedPresentationDelay;
5512 }
5513
5514 var isAudioOnly = master.playlists.length === 0;
5515
5516 if (audioPlaylists.length) {
5517 master.mediaGroups.AUDIO.audio = organizeAudioPlaylists(audioPlaylists, sidxMapping, isAudioOnly);
5518 }
5519
5520 if (vttPlaylists.length) {
5521 master.mediaGroups.SUBTITLES.subs = organizeVttPlaylists(vttPlaylists, sidxMapping);
5522 }
5523
5524 return master;
5525 };
5526 /**
5527 * Calculates the R (repetition) value for a live stream (for the final segment
5528 * in a manifest where the r value is negative 1)
5529 *
5530 * @param {Object} attributes
5531 * Object containing all inherited attributes from parent elements with attribute
5532 * names as keys
5533 * @param {number} time
5534 * current time (typically the total time up until the final segment)
5535 * @param {number} duration
5536 * duration property for the given <S />
5537 *
5538 * @return {number}
5539 * R value to reach the end of the given period
5540 */
5541
5542
5543 var getLiveRValue = function getLiveRValue(attributes, time, duration) {
5544 var NOW = attributes.NOW,
5545 clientOffset = attributes.clientOffset,
5546 availabilityStartTime = attributes.availabilityStartTime,
5547 _attributes$timescale = attributes.timescale,
5548 timescale = _attributes$timescale === void 0 ? 1 : _attributes$timescale,
5549 _attributes$start = attributes.start,
5550 start = _attributes$start === void 0 ? 0 : _attributes$start,
5551 _attributes$minimumUp = attributes.minimumUpdatePeriod,
5552 minimumUpdatePeriod = _attributes$minimumUp === void 0 ? 0 : _attributes$minimumUp;
5553 var now = (NOW + clientOffset) / 1000;
5554 var periodStartWC = availabilityStartTime + start;
5555 var periodEndWC = now + minimumUpdatePeriod;
5556 var periodDuration = periodEndWC - periodStartWC;
5557 return Math.ceil((periodDuration * timescale - time) / duration);
5558 };
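  /*
   * Worked example (illustrative values, not from any real manifest): with
   * NOW = 20000 (ms), clientOffset = 0, availabilityStartTime = 0, start = 0,
   * timescale = 1 and minimumUpdatePeriod = 0, the wall-clock period duration
   * is (20000 + 0) / 1000 + 0 - (0 + 0) = 20 seconds. For a current time of 10
   * (in timescale units) and a segment duration of 2:
   *
   *   getLiveRValue(attributes, 10, 2); // => Math.ceil((20 * 1 - 10) / 2) === 5
   *
   * i.e. five more repetitions of the segment are needed to reach the live edge.
   */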
5559 /**
5560 * Uses information provided by SegmentTemplate.SegmentTimeline to determine segment
5561 * timing and duration
5562 *
5563 * @param {Object} attributes
5564 * Object containing all inherited attributes from parent elements with attribute
5565 * names as keys
5566 * @param {Object[]} segmentTimeline
5567 * List of objects representing the attributes of each S element contained within
5568 *
5569 * @return {{number: number, duration: number, time: number, timeline: number}[]}
5570 * List of Objects with segment timing and duration info
5571 */
5572
5573
5574 var parseByTimeline = function parseByTimeline(attributes, segmentTimeline) {
5575 var _attributes$type = attributes.type,
5576 type = _attributes$type === void 0 ? 'static' : _attributes$type,
5577 _attributes$minimumUp2 = attributes.minimumUpdatePeriod,
5578 minimumUpdatePeriod = _attributes$minimumUp2 === void 0 ? 0 : _attributes$minimumUp2,
5579 _attributes$media = attributes.media,
5580 media = _attributes$media === void 0 ? '' : _attributes$media,
5581 sourceDuration = attributes.sourceDuration,
5582 _attributes$timescale2 = attributes.timescale,
5583 timescale = _attributes$timescale2 === void 0 ? 1 : _attributes$timescale2,
5584 _attributes$startNumb = attributes.startNumber,
5585 startNumber = _attributes$startNumb === void 0 ? 1 : _attributes$startNumb,
5586 timeline = attributes.periodIndex;
5587 var segments = [];
5588 var time = -1;
5589
5590 for (var sIndex = 0; sIndex < segmentTimeline.length; sIndex++) {
5591 var S = segmentTimeline[sIndex];
5592 var duration = S.d;
5593 var repeat = S.r || 0;
5594 var segmentTime = S.t || 0;
5595
5596 if (time < 0) {
5597 // first segment
5598 time = segmentTime;
5599 }
5600
5601 if (segmentTime && segmentTime > time) {
5602 // discontinuity
5603 // TODO: How to handle this type of discontinuity
5604 // timeline++ here would treat it like an HLS discontinuity and content would
5605 // get appended without a gap
5606 // E.G.
5607 // <S t="0" d="1" />
5608 // <S d="1" />
5609 // <S d="1" />
5610 // <S t="5" d="1" />
5611 // would have $Time$ values of [0, 1, 2, 5]
5612 // should this be appended at time positions [0, 1, 2, 3]? (#EXT-X-DISCONTINUITY)
5613 // or [0, 1, 2, gap, gap, 5]? (#EXT-X-GAP)
5614 // does the value of sourceDuration consider this when calculating arbitrary
5615 // negative @r repeat value?
5616 // E.G. Same elements as above with this added at the end
5617 // <S d="1" r="-1" />
5618 // with a sourceDuration of 10
5619 // Would the 2 gaps be included in the time duration calculations resulting in
5620 // 8 segments with $Time$ values of [0, 1, 2, 5, 6, 7, 8, 9] or 10 segments
5621 // with $Time$ values of [0, 1, 2, 5, 6, 7, 8, 9, 10, 11] ?
5622 time = segmentTime;
5623 }
5624
5625 var count = void 0;
5626
5627 if (repeat < 0) {
5628 var nextS = sIndex + 1;
5629
5630 if (nextS === segmentTimeline.length) {
5631 // last segment
5632 if (type === 'dynamic' && minimumUpdatePeriod > 0 && media.indexOf('$Number$') > 0) {
5633 count = getLiveRValue(attributes, time, duration);
5634 } else {
5635 // TODO: This may be incorrect depending on conclusion of TODO above
5636 count = (sourceDuration * timescale - time) / duration;
5637 }
5638 } else {
5639 count = (segmentTimeline[nextS].t - time) / duration;
5640 }
5641 } else {
5642 count = repeat + 1;
5643 }
5644
5645 var end = startNumber + segments.length + count;
5646 var number = startNumber + segments.length;
5647
5648 while (number < end) {
5649 segments.push({
5650 number: number,
5651 duration: duration / timescale,
5652 time: time,
5653 timeline: timeline
5654 });
5655 time += duration;
5656 number++;
5657 }
5658 }
5659
5660 return segments;
5661 };
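  /*
   * Illustrative sketch (hypothetical attribute values): a SegmentTimeline of
   *   <S t="0" d="2" r="1" /><S d="3" />
   * parsed into [{ t: 0, d: 2, r: 1 }, { d: 3 }], combined with
   * attributes = { type: 'static', timescale: 1, startNumber: 1, periodIndex: 0 },
   * expands to
   *   [{ number: 1, duration: 2, time: 0, timeline: 0 },
   *    { number: 2, duration: 2, time: 2, timeline: 0 },
   *    { number: 3, duration: 3, time: 4, timeline: 0 }]
   * since r="1" means one additional repetition of the first duration.
   */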
5662
5663 var identifierPattern = /\$([A-z]*)(?:(%0)([0-9]+)d)?\$/g;
5664 /**
5665 * Replaces template identifiers with corresponding values. To be used as the callback
5666 * for String.prototype.replace
5667 *
5668 * @name replaceCallback
5669 * @function
5670 * @param {string} match
5671 * Entire match of identifier
5672 * @param {string} identifier
5673 * Name of matched identifier
5674 * @param {string} format
5675 * Format tag string. Its presence indicates that padding is expected
5676 * @param {string} width
5677 * Desired length of the replaced value. Values shorter than this width are
5678 * left-padded with zeros
5679 * @return {string}
5680 * Replacement for the matched identifier
5681 */
5682
5683 /**
5684 * Returns a function to be used as a callback for String.prototype.replace to replace
5685 * template identifiers
5686 *
5687 * @param {Object} values
5688 * Object containing values that shall be used to replace known identifiers
5689 * @param {number} values.RepresentationID
5690 * Value of the Representation@id attribute
5691 * @param {number} values.Number
5692 * Number of the corresponding segment
5693 * @param {number} values.Bandwidth
5694 * Value of the Representation@bandwidth attribute.
5695 * @param {number} values.Time
5696 * Timestamp value of the corresponding segment
5697 * @return {replaceCallback}
5698 * Callback to be used with String.prototype.replace to replace identifiers
5699 */
5700
5701 var identifierReplacement = function identifierReplacement(values) {
5702 return function (match, identifier, format, width) {
5703 if (match === '$$') {
5704 // escape sequence
5705 return '$';
5706 }
5707
5708 if (typeof values[identifier] === 'undefined') {
5709 return match;
5710 }
5711
5712 var value = '' + values[identifier];
5713
5714 if (identifier === 'RepresentationID') {
5715 // Format tag shall not be present with RepresentationID
5716 return value;
5717 }
5718
5719 if (!format) {
5720 width = 1;
5721 } else {
5722 width = parseInt(width, 10);
5723 }
5724
5725 if (value.length >= width) {
5726 return value;
5727 }
5728
5729 return "" + new Array(width - value.length + 1).join('0') + value;
5730 };
5731 };
5732 /**
5733 * Constructs a segment url from a template string
5734 *
5735 * @param {string} url
5736 * Template string to construct url from
5737 * @param {Object} values
5738 * Object containing values that shall be used to replace known identifiers
5739 * @param {number} values.RepresentationID
5740 * Value of the Representation@id attribute
5741 * @param {number} values.Number
5742 * Number of the corresponding segment
5743 * @param {number} values.Bandwidth
5744 * Value of the Representation@bandwidth attribute.
5745 * @param {number} values.Time
5746 * Timestamp value of the corresponding segment
5747 * @return {string}
5748 * Segment url with identifiers replaced
5749 */
5750
5751
5752 var constructTemplateUrl = function constructTemplateUrl(url, values) {
5753 return url.replace(identifierPattern, identifierReplacement(values));
5754 };
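  /*
   * Example usage (hypothetical template and values):
   *
   *   constructTemplateUrl('video/$RepresentationID$/seg-$Number%05d$.mp4', {
   *     RepresentationID: '720p',
   *     Number: 3
   *   });
   *   // => 'video/720p/seg-00003.mp4'
   *
   * '$Number%05d$' matches identifierPattern with identifier 'Number' and a
   * width of 5, so the value is zero-padded; an escaped '$$' is replaced with
   * a literal '$'.
   */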
5755 /**
5756 * Generates a list of objects containing timing and duration information about each
5757 * segment needed to generate segment uris and the complete segment object
5758 *
5759 * @param {Object} attributes
5760 * Object containing all inherited attributes from parent elements with attribute
5761 * names as keys
5762 * @param {Object[]|undefined} segmentTimeline
5763 * List of objects representing the attributes of each S element contained within
5764 * the SegmentTimeline element
5765 * @return {{number: number, duration: number, time: number, timeline: number}[]}
5766 * List of Objects with segment timing and duration info
5767 */
5768
5769
5770 var parseTemplateInfo = function parseTemplateInfo(attributes, segmentTimeline) {
5771 if (!attributes.duration && !segmentTimeline) {
5772 // if neither @duration or SegmentTimeline are present, then there shall be exactly
5773 // one media segment
5774 return [{
5775 number: attributes.startNumber || 1,
5776 duration: attributes.sourceDuration,
5777 time: 0,
5778 timeline: attributes.periodIndex
5779 }];
5780 }
5781
5782 if (attributes.duration) {
5783 return parseByDuration(attributes);
5784 }
5785
5786 return parseByTimeline(attributes, segmentTimeline);
5787 };
5788 /**
5789 * Generates a list of segments using information provided by the SegmentTemplate element
5790 *
5791 * @param {Object} attributes
5792 * Object containing all inherited attributes from parent elements with attribute
5793 * names as keys
5794 * @param {Object[]|undefined} segmentTimeline
5795 * List of objects representing the attributes of each S element contained within
5796 * the SegmentTimeline element
5797 * @return {Object[]}
5798 * List of segment objects
5799 */
5800
5801
5802 var segmentsFromTemplate = function segmentsFromTemplate(attributes, segmentTimeline) {
5803 var templateValues = {
5804 RepresentationID: attributes.id,
5805 Bandwidth: attributes.bandwidth || 0
5806 };
5807 var _attributes$initializ = attributes.initialization,
5808 initialization = _attributes$initializ === void 0 ? {
5809 sourceURL: '',
5810 range: ''
5811 } : _attributes$initializ;
5812 var mapSegment = urlTypeToSegment({
5813 baseUrl: attributes.baseUrl,
5814 source: constructTemplateUrl(initialization.sourceURL, templateValues),
5815 range: initialization.range
5816 });
5817 var segments = parseTemplateInfo(attributes, segmentTimeline);
5818 return segments.map(function (segment) {
5819 templateValues.Number = segment.number;
5820 templateValues.Time = segment.time;
5821 var uri = constructTemplateUrl(attributes.media || '', templateValues);
5822 var map = {
5823 uri: uri,
5824 timeline: segment.timeline,
5825 duration: segment.duration,
5826 resolvedUri: resolveUrl$1(attributes.baseUrl || '', uri),
5827 map: mapSegment,
5828 number: segment.number
5829 };
5830
5831 if (attributes.presentationTimeOffset) {
5832 map.presentationTimeOffset = attributes.presentationTimeOffset;
5833 }
5834
5835 return map;
5836 });
5837 };
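  /*
   * Minimal sketch (hypothetical attributes) of the no-@duration, no-timeline
   * case, where exactly one media segment is implied:
   *
   *   segmentsFromTemplate({
   *     id: 'audio-128k',
   *     bandwidth: 128000,
   *     baseUrl: 'https://example.com/dash/',
   *     media: '$RepresentationID$-$Number$.m4s',
   *     initialization: { sourceURL: '$RepresentationID$-init.mp4' },
   *     sourceDuration: 4,
   *     periodIndex: 0
   *   });
   *   // => [{ uri: 'audio-128k-1.m4s', number: 1, duration: 4, timeline: 0,
   *   //       resolvedUri: 'https://example.com/dash/audio-128k-1.m4s',
   *   //       map: { uri: 'audio-128k-init.mp4',
   *   //              resolvedUri: 'https://example.com/dash/audio-128k-init.mp4' } }]
   */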
5838 /**
5839 * Converts a <SegmentURL> (of type URLType from the DASH spec 5.3.9.2 Table 14)
5840 * to an object that matches the output of a segment in videojs/mpd-parser
5841 *
5842 * @param {Object} attributes
5843 * Object containing all inherited attributes from parent elements with attribute
5844 * names as keys
5845 * @param {Object} segmentUrl
5846 * <SegmentURL> node to translate into a segment object
5847 * @return {Object} translated segment object
5848 */
5849
5850
5851 var SegmentURLToSegmentObject = function SegmentURLToSegmentObject(attributes, segmentUrl) {
5852 var baseUrl = attributes.baseUrl,
5853 _attributes$initializ = attributes.initialization,
5854 initialization = _attributes$initializ === void 0 ? {} : _attributes$initializ;
5855 var initSegment = urlTypeToSegment({
5856 baseUrl: baseUrl,
5857 source: initialization.sourceURL,
5858 range: initialization.range
5859 });
5860 var segment = urlTypeToSegment({
5861 baseUrl: baseUrl,
5862 source: segmentUrl.media,
5863 range: segmentUrl.mediaRange
5864 });
5865 segment.map = initSegment;
5866 return segment;
5867 };
5868 /**
5869 * Generates a list of segments using information provided by the SegmentList element.
5870 * SegmentList (DASH spec Section 5.3.9.3.2) contains a set of <SegmentURL> nodes. Each
5871 * node should be translated into a segment.
5872 *
5873 * @param {Object} attributes
5874 * Object containing all inherited attributes from parent elements with attribute
5875 * names as keys
5876 * @param {Object[]|undefined} segmentTimeline
5877 * List of objects representing the attributes of each S element contained within
5878 * the SegmentTimeline element
5879 * @return {Object.<Array>} list of segments
5880 */
5881
5882
5883 var segmentsFromList = function segmentsFromList(attributes, segmentTimeline) {
5884 var duration = attributes.duration,
5885 _attributes$segmentUr = attributes.segmentUrls,
5886 segmentUrls = _attributes$segmentUr === void 0 ? [] : _attributes$segmentUr; // Per spec (5.3.9.2.1): if neither @duration nor SegmentTimeline is present, there is
5887 // no way to determine segment duration; if both are defined, it is outside of spec.
5888
5889 if (!duration && !segmentTimeline || duration && segmentTimeline) {
5890 throw new Error(errors.SEGMENT_TIME_UNSPECIFIED);
5891 }
5892
5893 var segmentUrlMap = segmentUrls.map(function (segmentUrlObject) {
5894 return SegmentURLToSegmentObject(attributes, segmentUrlObject);
5895 });
5896 var segmentTimeInfo;
5897
5898 if (duration) {
5899 segmentTimeInfo = parseByDuration(attributes);
5900 }
5901
5902 if (segmentTimeline) {
5903 segmentTimeInfo = parseByTimeline(attributes, segmentTimeline);
5904 }
5905
5906 var segments = segmentTimeInfo.map(function (segmentTime, index) {
5907 if (segmentUrlMap[index]) {
5908 var segment = segmentUrlMap[index];
5909 segment.timeline = segmentTime.timeline;
5910 segment.duration = segmentTime.duration;
5911 segment.number = segmentTime.number;
5912 return segment;
5913 } // Since we're mapping we should get rid of any blank segments (in case
5914 // the given SegmentTimeline describes more elements than we have
5915 // SegmentURLs for).
5916
5917 }).filter(function (segment) {
5918 return segment;
5919 });
5920 return segments;
5921 };
5922
5923 var generateSegments = function generateSegments(_ref) {
5924 var attributes = _ref.attributes,
5925 segmentInfo = _ref.segmentInfo;
5926 var segmentAttributes;
5927 var segmentsFn;
5928
5929 if (segmentInfo.template) {
5930 segmentsFn = segmentsFromTemplate;
5931 segmentAttributes = merge(attributes, segmentInfo.template);
5932
5933 if (segmentInfo.template.presentationTimeOffset) {
5934 segmentAttributes.presentationTimeOffset = segmentInfo.template.presentationTimeOffset / segmentInfo.template.timescale;
5935 }
5936 } else if (segmentInfo.base) {
5937 segmentsFn = segmentsFromBase;
5938 segmentAttributes = merge(attributes, segmentInfo.base);
5939 } else if (segmentInfo.list) {
5940 segmentsFn = segmentsFromList;
5941 segmentAttributes = merge(attributes, segmentInfo.list);
5942 }
5943
5944 var segmentsInfo = {
5945 attributes: attributes
5946 };
5947
5948 if (!segmentsFn) {
5949 return segmentsInfo;
5950 }
5951
5952 var segments = segmentsFn(segmentAttributes, segmentInfo.timeline); // The @duration attribute will be used to determine the playlist's targetDuration, which
5953 // must be in seconds. Since we've generated the segment list, we no longer need
5954 // @duration to be in @timescale units, so we can convert it here.
5955
5956 if (segmentAttributes.duration) {
5957 var _segmentAttributes = segmentAttributes,
5958 duration = _segmentAttributes.duration,
5959 _segmentAttributes$ti = _segmentAttributes.timescale,
5960 timescale = _segmentAttributes$ti === void 0 ? 1 : _segmentAttributes$ti;
5961 segmentAttributes.duration = duration / timescale;
5962 } else if (segments.length) {
5963 // if there is no @duration attribute, use the largest segment duration as
5964 // the target duration
5965 segmentAttributes.duration = segments.reduce(function (max, segment) {
5966 return Math.max(max, Math.ceil(segment.duration));
5967 }, 0);
5968 } else {
5969 segmentAttributes.duration = 0;
5970 }
5971
5972 segmentsInfo.attributes = segmentAttributes;
5973 segmentsInfo.segments = segments; // This is a sidx box without actual segment information
5974
5975 if (segmentInfo.base && segmentAttributes.indexRange) {
5976 segmentsInfo.sidx = segments[0];
5977 segmentsInfo.segments = [];
5978 }
5979
5980 return segmentsInfo;
5981 };
5982
5983 var toPlaylists = function toPlaylists(representations) {
5984 return representations.map(generateSegments);
5985 };
5986
5987 var findChildren = function findChildren(element, name) {
5988 return from(element.childNodes).filter(function (_ref) {
5989 var tagName = _ref.tagName;
5990 return tagName === name;
5991 });
5992 };
5993
5994 var getContent = function getContent(element) {
5995 return element.textContent.trim();
5996 };
5997
5998 var parseDuration = function parseDuration(str) {
5999 var SECONDS_IN_YEAR = 365 * 24 * 60 * 60;
6000 var SECONDS_IN_MONTH = 30 * 24 * 60 * 60;
6001 var SECONDS_IN_DAY = 24 * 60 * 60;
6002 var SECONDS_IN_HOUR = 60 * 60;
6003 var SECONDS_IN_MIN = 60; // P10Y10M10DT10H10M10.1S
6004
6005 var durationRegex = /P(?:(\d*)Y)?(?:(\d*)M)?(?:(\d*)D)?(?:T(?:(\d*)H)?(?:(\d*)M)?(?:([\d.]*)S)?)?/;
6006 var match = durationRegex.exec(str);
6007
6008 if (!match) {
6009 return 0;
6010 }
6011
6012 var _match$slice = match.slice(1),
6013 year = _match$slice[0],
6014 month = _match$slice[1],
6015 day = _match$slice[2],
6016 hour = _match$slice[3],
6017 minute = _match$slice[4],
6018 second = _match$slice[5];
6019
6020 return parseFloat(year || 0) * SECONDS_IN_YEAR + parseFloat(month || 0) * SECONDS_IN_MONTH + parseFloat(day || 0) * SECONDS_IN_DAY + parseFloat(hour || 0) * SECONDS_IN_HOUR + parseFloat(minute || 0) * SECONDS_IN_MIN + parseFloat(second || 0);
6021 };
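  /*
   * Example conversions (exact, given the approximations above of 30-day
   * months and 365-day years):
   *
   *   parseDuration('PT1H30M15.5S'); // => 5415.5 seconds
   *   parseDuration('P1DT12H');      // => 129600 seconds
   *   parseDuration('P1M');          // => 2592000 seconds (30 days)
   */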
6022
6023 var parseDate = function parseDate(str) {
6024 // Date format without timezone according to ISO 8601
6025 // YYYY-MM-DDThh:mm:ss.ssssss
6026 var dateRegex = /^\d+-\d+-\d+T\d+:\d+:\d+(\.\d+)?$/; // If the date string does not specify a timezone, we must specify UTC. This is
6027 // expressed by ending with 'Z'
6028
6029 if (dateRegex.test(str)) {
6030 str += 'Z';
6031 }
6032
6033 return Date.parse(str);
6034 };
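  /*
   * Example: a timezone-less date string is treated as UTC, so both calls
   * below return the same millisecond timestamp:
   *
   *   parseDate('2015-03-04T12:00:00');  // 'Z' is appended before parsing
   *   parseDate('2015-03-04T12:00:00Z'); // => 1425470400000
   */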
6035
6036 var parsers = {
6037 /**
6038 * Specifies the duration of the entire Media Presentation. Format is a duration string
6039 * as specified in ISO 8601
6040 *
6041 * @param {string} value
6042 * value of attribute as a string
6043 * @return {number}
6044 * The duration in seconds
6045 */
6046 mediaPresentationDuration: function mediaPresentationDuration(value) {
6047 return parseDuration(value);
6048 },
6049
6050 /**
6051 * Specifies the Segment availability start time for all Segments referred to in this
6052 * MPD. For a dynamic manifest, it specifies the anchor for the earliest availability
6053 * time. Format is a date string as specified in ISO 8601
6054 *
6055 * @param {string} value
6056 * value of attribute as a string
6057 * @return {number}
6058 * The date as seconds from unix epoch
6059 */
6060 availabilityStartTime: function availabilityStartTime(value) {
6061 return parseDate(value) / 1000;
6062 },
6063
6064 /**
6065 * Specifies the smallest period between potential changes to the MPD. Format is a
6066 * duration string as specified in ISO 8601
6067 *
6068 * @param {string} value
6069 * value of attribute as a string
6070 * @return {number}
6071 * The duration in seconds
6072 */
6073 minimumUpdatePeriod: function minimumUpdatePeriod(value) {
6074 return parseDuration(value);
6075 },
6076
6077 /**
6078 * Specifies the suggested presentation delay. Format is a
6079 * duration string as specified in ISO 8601
6080 *
6081 * @param {string} value
6082 * value of attribute as a string
6083 * @return {number}
6084 * The duration in seconds
6085 */
6086 suggestedPresentationDelay: function suggestedPresentationDelay(value) {
6087 return parseDuration(value);
6088 },
6089
6090 /**
6091 * Specifies the type of the MPD. Can be either "static" or "dynamic"
6092 *
6093 * @param {string} value
6094 * value of attribute as a string
6095 *
6096 * @return {string}
6097 * The type as a string
6098 */
6099 type: function type(value) {
6100 return value;
6101 },
6102
6103 /**
6104 * Specifies the duration of the smallest time shifting buffer for any Representation
6105 * in the MPD. Format is a duration string as specified in ISO 8601
6106 *
6107 * @param {string} value
6108 * value of attribute as a string
6109 * @return {number}
6110 * The duration in seconds
6111 */
6112 timeShiftBufferDepth: function timeShiftBufferDepth(value) {
6113 return parseDuration(value);
6114 },
6115
6116 /**
6117 * Specifies the PeriodStart time of the Period relative to the availabilityStartTime.
6118 * Format is a duration string as specified in ISO 8601
6119 *
6120 * @param {string} value
6121 * value of attribute as a string
6122 * @return {number}
6123 * The duration in seconds
6124 */
6125 start: function start(value) {
6126 return parseDuration(value);
6127 },
6128
6129 /**
6130 * Specifies the width of the visual presentation
6131 *
6132 * @param {string} value
6133 * value of attribute as a string
6134 * @return {number}
6135 * The parsed width
6136 */
6137 width: function width(value) {
6138 return parseInt(value, 10);
6139 },
6140
6141 /**
6142 * Specifies the height of the visual presentation
6143 *
6144 * @param {string} value
6145 * value of attribute as a string
6146 * @return {number}
6147 * The parsed height
6148 */
6149 height: function height(value) {
6150 return parseInt(value, 10);
6151 },
6152
6153 /**
6154 * Specifies the bitrate of the representation
6155 *
6156 * @param {string} value
6157 * value of attribute as a string
6158 * @return {number}
6159 * The parsed bandwidth
6160 */
6161 bandwidth: function bandwidth(value) {
6162 return parseInt(value, 10);
6163 },
6164
6165 /**
6166 * Specifies the number of the first Media Segment in this Representation in the Period
6167 *
6168 * @param {string} value
6169 * value of attribute as a string
6170 * @return {number}
6171 * The parsed number
6172 */
6173 startNumber: function startNumber(value) {
6174 return parseInt(value, 10);
6175 },
6176
6177 /**
6178 * Specifies the timescale in units per second
6179 *
6180 * @param {string} value
6181 * value of attribute as a string
6182 * @return {number}
6183 * The parsed timescale
6184 */
6185 timescale: function timescale(value) {
6186 return parseInt(value, 10);
6187 },
6188
6189 /**
6190 * Specifies the presentationTimeOffset.
6191 *
6192 * @param {string} value
6193 * value of the attribute as a string
6194 *
6195 * @return {number}
6196 * The parsed presentationTimeOffset
6197 */
6198 presentationTimeOffset: function presentationTimeOffset(value) {
6199 return parseInt(value, 10);
6200 },
6201
6202 /**
6203 * Specifies the constant approximate Segment duration
6204 * NOTE: The <Period> element also contains an @duration attribute. This duration
6205 * specifies the duration of the Period. This attribute is currently not
6206 * supported by the rest of the parser, however we still check for it to prevent
6207 * errors.
6208 *
6209 * @param {string} value
6210 * value of attribute as a string
6211 * @return {number}
6212 * The parsed duration
6213 */
6214 duration: function duration(value) {
6215 var parsedValue = parseInt(value, 10);
6216
6217 if (isNaN(parsedValue)) {
6218 return parseDuration(value);
6219 }
6220
6221 return parsedValue;
6222 },
6223
6224 /**
6225 * Specifies the Segment duration, in units of the value of the @timescale.
6226 *
6227 * @param {string} value
6228 * value of attribute as a string
6229 * @return {number}
6230 * The parsed duration
6231 */
6232 d: function d(value) {
6233 return parseInt(value, 10);
6234 },
6235
6236 /**
6237 * Specifies the MPD start time, in @timescale units, at which the first Segment in the
6238 * series starts, relative to the beginning of the Period
6239 *
6240 * @param {string} value
6241 * value of attribute as a string
6242 * @return {number}
6243 * The parsed time
6244 */
6245 t: function t(value) {
6246 return parseInt(value, 10);
6247 },
6248
6249 /**
6250 * Specifies the repeat count of the number of following contiguous Segments with the
6251 * same duration expressed by the value of @d
6252 *
6253 * @param {string} value
6254 * value of attribute as a string
6255 * @return {number}
6256 * The parsed number
6257 */
6258 r: function r(value) {
6259 return parseInt(value, 10);
6260 },
6261
6262 /**
6263 * Default parser for all other attributes. Acts as a no-op and just returns the value
6264 * as a string
6265 *
6266 * @param {string} value
6267 * value of attribute as a string
6268 * @return {string}
6269 * Unparsed value
6270 */
6271 DEFAULT: function DEFAULT(value) {
6272 return value;
6273 }
6274 };
6275 /**
6276 * Gets all the attributes and values of the provided node, parses attributes with known
6277 * types, and returns an object with attribute names mapped to values.
6278 *
6279 * @param {Node} el
6280 * The node to parse attributes from
6281 * @return {Object}
6282 * Object with all attributes of el parsed
6283 */
6284
6285 var parseAttributes = function parseAttributes(el) {
6286 if (!(el && el.attributes)) {
6287 return {};
6288 }
6289
6290 return from(el.attributes).reduce(function (a, e) {
6291 var parseFn = parsers[e.name] || parsers.DEFAULT;
6292 a[e.name] = parseFn(e.value);
6293 return a;
6294 }, {});
6295 };
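  /*
   * Illustrative result (hypothetical element): for a node parsed from
   *   <Representation id="720p" width="1280" height="720" bandwidth="2000000" codecs="avc1.4d401f"/>
   * parseAttributes returns
   *   { id: '720p', width: 1280, height: 720, bandwidth: 2000000, codecs: 'avc1.4d401f' }
   * where width/height/bandwidth go through their typed parsers while id and
   * codecs fall through to parsers.DEFAULT and stay strings.
   */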
6296
6297 var keySystemsMap = {
6298 'urn:uuid:1077efec-c0b2-4d02-ace3-3c1e52e2fb4b': 'org.w3.clearkey',
6299 'urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed': 'com.widevine.alpha',
6300 'urn:uuid:9a04f079-9840-4286-ab92-e65be0885f95': 'com.microsoft.playready',
6301 'urn:uuid:f239e769-efa3-4850-9c16-a903c6932efb': 'com.adobe.primetime'
6302 };
6303 /**
6304 * Builds a list of urls that is the product of the reference urls and BaseURL values
6305 *
6306 * @param {string[]} referenceUrls
6307 * List of reference urls to resolve to
6308 * @param {Node[]} baseUrlElements
6309 * List of BaseURL nodes from the mpd
6310 * @return {string[]}
6311 * List of resolved urls
6312 */
6313
6314 var buildBaseUrls = function buildBaseUrls(referenceUrls, baseUrlElements) {
6315 if (!baseUrlElements.length) {
6316 return referenceUrls;
6317 }
6318
6319 return flatten(referenceUrls.map(function (reference) {
6320 return baseUrlElements.map(function (baseUrlElement) {
6321 return resolveUrl$1(reference, getContent(baseUrlElement));
6322 });
6323 }));
6324 };
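  /*
   * Sketch of the product behavior (hypothetical urls): one reference url and
   * two BaseURL nodes whose contents are 'video/' and
   * 'https://cdn2.example.com/video/' produce
   *
   *   buildBaseUrls(['https://cdn1.example.com/dash/'], baseUrlElements);
   *   // => ['https://cdn1.example.com/dash/video/',
   *   //     'https://cdn2.example.com/video/']
   *
   * since each relative BaseURL is resolved against every reference url while
   * absolute BaseURLs replace them.
   */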
6325 /**
6326 * Contains all Segment information for its containing AdaptationSet
6327 *
6328 * @typedef {Object} SegmentInformation
6329 * @property {Object|undefined} template
6330 * Contains the attributes for the SegmentTemplate node
6331 * @property {Object[]|undefined} timeline
6332 * Contains a list of attributes for each S node within the SegmentTimeline node
6333 * @property {Object|undefined} list
6334 * Contains the attributes for the SegmentList node
6335 * @property {Object|undefined} base
6336 * Contains the attributes for the SegmentBase node
6337 */
6338
6339 /**
6340 * Returns all available Segment information contained within the AdaptationSet node
6341 *
6342 * @param {Node} adaptationSet
6343 * The AdaptationSet node to get Segment information from
6344 * @return {SegmentInformation}
6345 * The Segment information contained within the provided AdaptationSet
6346 */
6347
6348
6349 var getSegmentInformation = function getSegmentInformation(adaptationSet) {
6350 var segmentTemplate = findChildren(adaptationSet, 'SegmentTemplate')[0];
6351 var segmentList = findChildren(adaptationSet, 'SegmentList')[0];
6352 var segmentUrls = segmentList && findChildren(segmentList, 'SegmentURL').map(function (s) {
6353 return merge({
6354 tag: 'SegmentURL'
6355 }, parseAttributes(s));
6356 });
6357 var segmentBase = findChildren(adaptationSet, 'SegmentBase')[0];
6358 var segmentTimelineParentNode = segmentList || segmentTemplate;
6359 var segmentTimeline = segmentTimelineParentNode && findChildren(segmentTimelineParentNode, 'SegmentTimeline')[0];
6360 var segmentInitializationParentNode = segmentList || segmentBase || segmentTemplate;
6361 var segmentInitialization = segmentInitializationParentNode && findChildren(segmentInitializationParentNode, 'Initialization')[0]; // SegmentTemplate is handled slightly differently, since it can have both
6362 // @initialization and an <Initialization> node. @initialization can be templated,
6363 // while the node can have a url and range specified. If the <SegmentTemplate> has
6364 // both @initialization and an <Initialization> subelement we opt to override with
6365 // the node, as this interaction is not defined in the spec.
6366
6367 var template = segmentTemplate && parseAttributes(segmentTemplate);
6368
6369 if (template && segmentInitialization) {
6370 template.initialization = segmentInitialization && parseAttributes(segmentInitialization);
6371 } else if (template && template.initialization) {
6372 // If it is @initialization we convert it to an object since this is the format that
6373 // later functions will rely on for the initialization segment. This is only valid
6374 // for <SegmentTemplate>
6375 template.initialization = {
6376 sourceURL: template.initialization
6377 };
6378 }
6379
6380 var segmentInfo = {
6381 template: template,
6382 timeline: segmentTimeline && findChildren(segmentTimeline, 'S').map(function (s) {
6383 return parseAttributes(s);
6384 }),
6385 list: segmentList && merge(parseAttributes(segmentList), {
6386 segmentUrls: segmentUrls,
6387 initialization: parseAttributes(segmentInitialization)
6388 }),
6389 base: segmentBase && merge(parseAttributes(segmentBase), {
6390 initialization: parseAttributes(segmentInitialization)
6391 })
6392 };
6393 Object.keys(segmentInfo).forEach(function (key) {
6394 if (!segmentInfo[key]) {
6395 delete segmentInfo[key];
6396 }
6397 });
6398 return segmentInfo;
6399 };
6400 /**
6401 * Contains Segment information and attributes needed to construct a Playlist object
6402 * from a Representation
6403 *
6404 * @typedef {Object} RepresentationInformation
6405 * @property {SegmentInformation} segmentInfo
6406 * Segment information for this Representation
6407 * @property {Object} attributes
6408 * Inherited attributes for this Representation
6409 */
6410
6411 /**
6412 * Maps a Representation node to an object containing Segment information and attributes
6413 *
6414 * @name inheritBaseUrlsCallback
6415 * @function
6416 * @param {Node} representation
6417 * Representation node from the mpd
6418 * @return {RepresentationInformation}
6419 * Representation information needed to construct a Playlist object
6420 */
6421
6422 /**
6423 * Returns a callback for Array.prototype.map for mapping Representation nodes to
6424 * Segment information and attributes using inherited BaseURL nodes.
6425 *
6426 * @param {Object} adaptationSetAttributes
6427 * Contains attributes inherited by the AdaptationSet
6428 * @param {string[]} adaptationSetBaseUrls
6429 * Contains list of resolved base urls inherited by the AdaptationSet
6430 * @param {SegmentInformation} adaptationSetSegmentInfo
6431 * Contains Segment information for the AdaptationSet
6432 * @return {inheritBaseUrlsCallback}
6433 * Callback map function
6434 */
6435
6436
6437 var inheritBaseUrls = function inheritBaseUrls(adaptationSetAttributes, adaptationSetBaseUrls, adaptationSetSegmentInfo) {
6438 return function (representation) {
6439 var repBaseUrlElements = findChildren(representation, 'BaseURL');
6440 var repBaseUrls = buildBaseUrls(adaptationSetBaseUrls, repBaseUrlElements);
6441 var attributes = merge(adaptationSetAttributes, parseAttributes(representation));
6442 var representationSegmentInfo = getSegmentInformation(representation);
6443 return repBaseUrls.map(function (baseUrl) {
6444 return {
6445 segmentInfo: merge(adaptationSetSegmentInfo, representationSegmentInfo),
6446 attributes: merge(attributes, {
6447 baseUrl: baseUrl
6448 })
6449 };
6450 });
6451 };
6452 };
6453 /**
6454 * Transforms a series of content protection nodes to
6455 * an object containing pssh data by key system
6456 *
6457 * @param {Node[]} contentProtectionNodes
6458 * Content protection nodes
6459 * @return {Object}
6460 * Object containing pssh data by key system
6461 */
6462
6463
6464 var generateKeySystemInformation = function generateKeySystemInformation(contentProtectionNodes) {
6465 return contentProtectionNodes.reduce(function (acc, node) {
6466 var attributes = parseAttributes(node);
6467 var keySystem = keySystemsMap[attributes.schemeIdUri];
6468
6469 if (keySystem) {
6470 acc[keySystem] = {
6471 attributes: attributes
6472 };
6473 var psshNode = findChildren(node, 'cenc:pssh')[0];
6474
6475 if (psshNode) {
6476 var pssh = getContent(psshNode);
6477 var psshBuffer = pssh && decodeB64ToUint8Array(pssh);
6478 acc[keySystem].pssh = psshBuffer;
6479 }
6480 }
6481
6482 return acc;
6483 }, {});
6484 };
6485 /**
6486 * Maps an AdaptationSet node to a list of Representation information objects
6487 *
6488 * @name toRepresentationsCallback
6489 * @function
6490 * @param {Node} adaptationSet
6491 * AdaptationSet node from the mpd
6492 * @return {RepresentationInformation[]}
6493 * List of objects containing Representation information
6494 */
6495
6496 /**
6497 * Returns a callback for Array.prototype.map for mapping AdaptationSet nodes to a list of
6498 * Representation information objects
6499 *
6500 * @param {Object} periodAttributes
6501 * Contains attributes inherited by the Period
6502 * @param {string[]} periodBaseUrls
6503 * Contains list of resolved base urls inherited by the Period
6504 * @param {string[]} periodSegmentInfo
6505 * Contains Segment Information at the period level
6506 * @return {toRepresentationsCallback}
6507 * Callback map function
6508 */
6509
6510
6511 var toRepresentations = function toRepresentations(periodAttributes, periodBaseUrls, periodSegmentInfo) {
6512 return function (adaptationSet) {
6513 var adaptationSetAttributes = parseAttributes(adaptationSet);
6514 var adaptationSetBaseUrls = buildBaseUrls(periodBaseUrls, findChildren(adaptationSet, 'BaseURL'));
6515 var role = findChildren(adaptationSet, 'Role')[0];
6516 var roleAttributes = {
6517 role: parseAttributes(role)
6518 };
6519 var attrs = merge(periodAttributes, adaptationSetAttributes, roleAttributes);
6520 var label = findChildren(adaptationSet, 'Label')[0];
6521
6522 if (label && label.childNodes.length) {
6523 var labelVal = label.childNodes[0].nodeValue.trim();
6524 attrs = merge(attrs, {
6525 label: labelVal
6526 });
6527 }
6528
6529 var contentProtection = generateKeySystemInformation(findChildren(adaptationSet, 'ContentProtection'));
6530
6531 if (Object.keys(contentProtection).length) {
6532 attrs = merge(attrs, {
6533 contentProtection: contentProtection
6534 });
6535 }
6536
6537 var segmentInfo = getSegmentInformation(adaptationSet);
6538 var representations = findChildren(adaptationSet, 'Representation');
6539 var adaptationSetSegmentInfo = merge(periodSegmentInfo, segmentInfo);
6540 return flatten(representations.map(inheritBaseUrls(attrs, adaptationSetBaseUrls, adaptationSetSegmentInfo)));
6541 };
6542 };
6543 /**
6544 * Maps a Period node to a list of Representation information objects for all
6545 * AdaptationSet nodes contained within the Period
6546 *
6547 * @name toAdaptationSetsCallback
6548 * @function
6549 * @param {Node} period
6550 * Period node from the mpd
6551 * @param {number} periodIndex
6552 * Index of the Period within the mpd
6553 * @return {RepresentationInformation[]}
6554 * List of objects containing Representation information
6555 */
6556
6557 /**
6558 * Returns a callback for Array.prototype.map for mapping Period nodes to a list of
6559 * Representation information objects
6560 *
6561 * @param {Object} mpdAttributes
6562 * Contains attributes inherited by the mpd
6563 * @param {string[]} mpdBaseUrls
6564 * Contains list of resolved base urls inherited by the mpd
6565 * @return {toAdaptationSetsCallback}
6566 * Callback map function
6567 */
6568
6569
6570 var toAdaptationSets = function toAdaptationSets(mpdAttributes, mpdBaseUrls) {
6571 return function (period, index) {
6572 var periodBaseUrls = buildBaseUrls(mpdBaseUrls, findChildren(period, 'BaseURL'));
6573 var periodAtt = parseAttributes(period);
6574 var parsedPeriodId = parseInt(periodAtt.id, 10); // fallback to mapping index if Period@id is not a number
6575
6576 var periodIndex = window__default['default'].isNaN(parsedPeriodId) ? index : parsedPeriodId;
6577 var periodAttributes = merge(mpdAttributes, {
6578 periodIndex: periodIndex
6579 });
6580 var adaptationSets = findChildren(period, 'AdaptationSet');
6581 var periodSegmentInfo = getSegmentInformation(period);
6582 return flatten(adaptationSets.map(toRepresentations(periodAttributes, periodBaseUrls, periodSegmentInfo)));
6583 };
6584 };
6585 /**
6586 * Traverses the mpd xml tree to generate a list of Representation information objects
6587 * that have inherited attributes from parent nodes
6588 *
6589 * @param {Node} mpd
6590 * The root node of the mpd
6591 * @param {Object} options
6592 * Available options for inheritAttributes
6593 * @param {string} options.manifestUri
6594 * The uri source of the mpd
6595 * @param {number} options.NOW
6596 * Current time per DASH IOP. Default is current time in ms since epoch
6597 * @param {number} options.clientOffset
6598 * Client time difference from NOW (in milliseconds)
6599 * @return {RepresentationInformation[]}
6600 * List of objects containing Representation information
6601 */
6602
6603
6604 var inheritAttributes = function inheritAttributes(mpd, options) {
6605 if (options === void 0) {
6606 options = {};
6607 }
6608
6609 var _options = options,
6610 _options$manifestUri = _options.manifestUri,
6611 manifestUri = _options$manifestUri === void 0 ? '' : _options$manifestUri,
6612 _options$NOW = _options.NOW,
6613 NOW = _options$NOW === void 0 ? Date.now() : _options$NOW,
6614 _options$clientOffset = _options.clientOffset,
6615 clientOffset = _options$clientOffset === void 0 ? 0 : _options$clientOffset;
6616 var periods = findChildren(mpd, 'Period');
6617
6618 if (!periods.length) {
6619 throw new Error(errors.INVALID_NUMBER_OF_PERIOD);
6620 }
6621
6622 var locations = findChildren(mpd, 'Location');
6623 var mpdAttributes = parseAttributes(mpd);
6624 var mpdBaseUrls = buildBaseUrls([manifestUri], findChildren(mpd, 'BaseURL'));
6625 mpdAttributes.sourceDuration = mpdAttributes.mediaPresentationDuration || 0;
6626 mpdAttributes.NOW = NOW;
6627 mpdAttributes.clientOffset = clientOffset;
6628
6629 if (locations.length) {
6630 mpdAttributes.locations = locations.map(getContent);
6631 }
6632
6633 return {
6634 locations: mpdAttributes.locations,
6635 representationInfo: flatten(periods.map(toAdaptationSets(mpdAttributes, mpdBaseUrls)))
6636 };
6637 };
6638
6639 var stringToMpdXml = function stringToMpdXml(manifestString) {
6640 if (manifestString === '') {
6641 throw new Error(errors.DASH_EMPTY_MANIFEST);
6642 }
6643
6644 var parser = new xmldom.DOMParser();
6645 var xml;
6646 var mpd;
6647
6648 try {
6649 xml = parser.parseFromString(manifestString, 'application/xml');
6650 mpd = xml && xml.documentElement.tagName === 'MPD' ? xml.documentElement : null;
6651 } catch (e) {// ie 11 throws on invalid xml
6652 }
6653
6654 if (!mpd || mpd.getElementsByTagName('parsererror').length > 0) {
6655 throw new Error(errors.DASH_INVALID_XML);
6656 }
6657
6658 return mpd;
6659 };
6660 /**
6661 * Parses the manifest for a UTCTiming node, returning the nodes attributes if found
6662 *
6663 * @param {string} mpd
6664 * XML string of the MPD manifest
6665 * @return {Object|null}
6666 * Attributes of UTCTiming node specified in the manifest. Null if none found
6667 */
6668
6669
6670 var parseUTCTimingScheme = function parseUTCTimingScheme(mpd) {
6671 var UTCTimingNode = findChildren(mpd, 'UTCTiming')[0];
6672
6673 if (!UTCTimingNode) {
6674 return null;
6675 }
6676
6677 var attributes = parseAttributes(UTCTimingNode);
6678
6679 switch (attributes.schemeIdUri) {
6680 case 'urn:mpeg:dash:utc:http-head:2014':
6681 case 'urn:mpeg:dash:utc:http-head:2012':
6682 attributes.method = 'HEAD';
6683 break;
6684
6685 case 'urn:mpeg:dash:utc:http-xsdate:2014':
6686 case 'urn:mpeg:dash:utc:http-iso:2014':
6687 case 'urn:mpeg:dash:utc:http-xsdate:2012':
6688 case 'urn:mpeg:dash:utc:http-iso:2012':
6689 attributes.method = 'GET';
6690 break;
6691
6692 case 'urn:mpeg:dash:utc:direct:2014':
6693 case 'urn:mpeg:dash:utc:direct:2012':
6694 attributes.method = 'DIRECT';
6695 attributes.value = Date.parse(attributes.value);
6696 break;
6697
6698 case 'urn:mpeg:dash:utc:http-ntp:2014':
6699 case 'urn:mpeg:dash:utc:ntp:2014':
6700 case 'urn:mpeg:dash:utc:sntp:2014':
6701 default:
6702 throw new Error(errors.UNSUPPORTED_UTC_TIMING_SCHEME);
6703 }
6704
6705 return attributes;
6706 };
6707
6708 var parse = function parse(manifestString, options) {
6709 if (options === void 0) {
6710 options = {};
6711 }
6712
6713 var parsedManifestInfo = inheritAttributes(stringToMpdXml(manifestString), options);
6714 var playlists = toPlaylists(parsedManifestInfo.representationInfo);
6715 return toM3u8(playlists, parsedManifestInfo.locations, options.sidxMapping);
6716 };
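  /*
   * Typical usage of the DASH entry point (the manifest contents are assumed
   * to be a valid MPD string fetched elsewhere):
   *
   *   var master = parse(manifestString, { manifestUri: 'https://example.com/dash.mpd' });
   *   master.playlists;             // video playlists with generated segments
   *   master.mediaGroups.AUDIO;     // audio playlists grouped by label
   *   master.mediaGroups.SUBTITLES; // vtt playlists, when present
   */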
6717 /**
6718 * Parses the manifest for a UTCTiming node, returning the nodes attributes if found
6719 *
6720 * @param {string} manifestString
6721 * XML string of the MPD manifest
6722 * @return {Object|null}
6723 * Attributes of UTCTiming node specified in the manifest. Null if none found
6724 */
6725
6726
6727 var parseUTCTiming = function parseUTCTiming(manifestString) {
6728 return parseUTCTimingScheme(stringToMpdXml(manifestString));
6729 };
6730
6731 var MAX_UINT32 = Math.pow(2, 32);
6732
6733 var parseSidx = function parseSidx(data) {
6734 var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
6735 result = {
6736 version: data[0],
6737 flags: new Uint8Array(data.subarray(1, 4)),
6738 references: [],
6739 referenceId: view.getUint32(4),
6740 timescale: view.getUint32(8)
6741 },
6742 i = 12;
6743
6744 if (result.version === 0) {
6745 result.earliestPresentationTime = view.getUint32(i);
6746 result.firstOffset = view.getUint32(i + 4);
6747 i += 8;
6748 } else {
6749 // read 64 bits
6750 result.earliestPresentationTime = view.getUint32(i) * MAX_UINT32 + view.getUint32(i + 4);
6751 result.firstOffset = view.getUint32(i + 8) * MAX_UINT32 + view.getUint32(i + 12);
6752 i += 16;
6753 }
6754
6755 i += 2; // reserved
6756
6757 var referenceCount = view.getUint16(i);
6758 i += 2; // start of references
6759
6760 for (; referenceCount > 0; i += 12, referenceCount--) {
6761 result.references.push({
6762 referenceType: (data[i] & 0x80) >>> 7,
6763 referencedSize: view.getUint32(i) & 0x7FFFFFFF,
6764 subsegmentDuration: view.getUint32(i + 4),
6765 startsWithSap: !!(data[i + 8] & 0x80),
6766 sapType: (data[i + 8] & 0x70) >>> 4,
6767 sapDeltaTime: view.getUint32(i + 8) & 0x0FFFFFFF
6768 });
6769 }
6770
6771 return result;
6772 };
6773
6774 var parseSidx_1 = parseSidx;
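  /*
   * Rough shape of the parsed box (the field values below are hypothetical;
   * only the structure is fixed by parseSidx):
   *
   *   {
   *     version: 0,
   *     flags: Uint8Array(3),
   *     referenceId: 1,
   *     timescale: 90000,
   *     earliestPresentationTime: 0,
   *     firstOffset: 0,
   *     references: [{ referenceType: 0, referencedSize: 12345,
   *       subsegmentDuration: 180000, startsWithSap: true,
   *       sapType: 0, sapDeltaTime: 0 }]
   *   }
   */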
6775
6776 // we used to do this with log2 but BigInt does not support builtin math
6777 // Math.ceil(log2(x));
6778
6779
6780 var countBits = function countBits(x) {
6781 return x.toString(2).length;
6782 }; // count the number of whole bytes it would take to represent a number
6783
6784 var countBytes = function countBytes(x) {
6785 return Math.ceil(countBits(x) / 8);
6786 };
6787 var isTypedArray = function isTypedArray(obj) {
6788 return ArrayBuffer.isView(obj);
6789 };
6790 var toUint8 = function toUint8(bytes) {
6791 if (bytes instanceof Uint8Array) {
6792 return bytes;
6793 }
6794
6795 if (!Array.isArray(bytes) && !isTypedArray(bytes) && !(bytes instanceof ArrayBuffer)) {
6796 // any non-number or NaN leads to empty uint8array
6797 // eslint-disable-next-line
6798 if (typeof bytes !== 'number' || typeof bytes === 'number' && bytes !== bytes) {
6799 bytes = 0;
6800 } else {
6801 bytes = [bytes];
6802 }
6803 }
6804
6805 return new Uint8Array(bytes && bytes.buffer || bytes, bytes && bytes.byteOffset || 0, bytes && bytes.byteLength || 0);
6806 };
6807 var BigInt = window__default['default'].BigInt || Number;
6808 var BYTE_TABLE = [BigInt('0x1'), BigInt('0x100'), BigInt('0x10000'), BigInt('0x1000000'), BigInt('0x100000000'), BigInt('0x10000000000'), BigInt('0x1000000000000'), BigInt('0x100000000000000'), BigInt('0x10000000000000000')];
6809 var bytesToNumber = function bytesToNumber(bytes, _temp) {
6810 var _ref = _temp === void 0 ? {} : _temp,
6811 _ref$signed = _ref.signed,
6812 signed = _ref$signed === void 0 ? false : _ref$signed,
6813 _ref$le = _ref.le,
6814 le = _ref$le === void 0 ? false : _ref$le;
6815
6816 bytes = toUint8(bytes);
6817 var fn = le ? 'reduce' : 'reduceRight';
6818 var obj = bytes[fn] ? bytes[fn] : Array.prototype[fn];
6819 var number = obj.call(bytes, function (total, byte, i) {
6820 var exponent = le ? i : Math.abs(i + 1 - bytes.length);
6821 return total + BigInt(byte) * BYTE_TABLE[exponent];
6822 }, BigInt(0));
6823
6824 if (signed) {
6825 var max = BYTE_TABLE[bytes.length] / BigInt(2) - BigInt(1);
6826 number = BigInt(number);
6827
6828 if (number > max) {
6829 number -= max;
6830 number -= max;
6831 number -= BigInt(2);
6832 }
6833 }
6834
6835 return Number(number);
6836 };
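  /*
   * Quick examples (big-endian is the default, as in ISO-BMFF):
   *
   *   bytesToNumber([0x01, 0x00]);               // => 256
   *   bytesToNumber([0x01, 0x00], { le: true }); // => 1
   *   bytesToNumber([0xFF], { signed: true });   // => -1
   */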
6837 var numberToBytes = function numberToBytes(number, _temp2) {
6838 var _ref2 = _temp2 === void 0 ? {} : _temp2,
6839 _ref2$le = _ref2.le,
6840 le = _ref2$le === void 0 ? false : _ref2$le; // eslint-disable-next-line
6841
6842
6843 if (typeof number !== 'bigint' && typeof number !== 'number' || typeof number === 'number' && number !== number) {
6844 number = 0;
6845 }
6846
6847 number = BigInt(number);
6848 var byteCount = countBytes(number);
6849 var bytes = new Uint8Array(new ArrayBuffer(byteCount));
6850
6851 for (var i = 0; i < byteCount; i++) {
6852 var byteIndex = le ? i : Math.abs(i + 1 - bytes.length);
6853 bytes[byteIndex] = Number(number / BYTE_TABLE[i] & BigInt(0xFF));
6854
6855 if (number < 0) {
6856 bytes[byteIndex] = Math.abs(~bytes[byteIndex]);
6857 bytes[byteIndex] -= i === 0 ? 1 : 2;
6858 }
6859 }
6860
6861 return bytes;
6862 };
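  /*
   * Round-trip examples (big-endian by default, matching bytesToNumber):
   *
   *   numberToBytes(256);               // => Uint8Array [0x01, 0x00]
   *   numberToBytes(256, { le: true }); // => Uint8Array [0x00, 0x01]
   */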
6863 var stringToBytes = function stringToBytes(string, stringIsBytes) {
6864 if (typeof string !== 'string' && string && typeof string.toString === 'function') {
6865 string = string.toString();
6866 }
6867
6868 if (typeof string !== 'string') {
6869 return new Uint8Array();
6870 } // If the string already is bytes, we don't have to do this
6871 // otherwise we do this so that we split multi-byte characters
6872 // into individual bytes
6873
6874
6875 if (!stringIsBytes) {
6876 string = unescape(encodeURIComponent(string));
6877 }
6878
6879 var view = new Uint8Array(string.length);
6880
6881 for (var i = 0; i < string.length; i++) {
6882 view[i] = string.charCodeAt(i);
6883 }
6884
6885 return view;
6886 };
6887 var concatTypedArrays = function concatTypedArrays() {
6888 for (var _len = arguments.length, buffers = new Array(_len), _key = 0; _key < _len; _key++) {
6889 buffers[_key] = arguments[_key];
6890 }
6891
6892 buffers = buffers.filter(function (b) {
6893 return b && (b.byteLength || b.length) && typeof b !== 'string';
6894 });
6895
6896 if (buffers.length <= 1) {
6897 // for 0 length we will return empty uint8
6898 // for 1 length we return the first uint8
6899 return toUint8(buffers[0]);
6900 }
6901
6902 var totalLen = buffers.reduce(function (total, buf, i) {
6903 return total + (buf.byteLength || buf.length);
6904 }, 0);
6905 var tempBuffer = new Uint8Array(totalLen);
6906 var offset = 0;
6907 buffers.forEach(function (buf) {
6908 buf = toUint8(buf);
6909 tempBuffer.set(buf, offset);
6910 offset += buf.byteLength;
6911 });
6912 return tempBuffer;
6913 };
6914 /**
6915 * Check if the bytes "b" are contained within bytes "a".
6916 *
6917 * @param {Uint8Array|Array} a
6918 * Bytes to check in
6919 *
6920 * @param {Uint8Array|Array} b
6921 * Bytes to check for
6922 *
6923 * @param {Object} options
6924 * options
6925 *
6926 * @param {number} [options.offset=0]
6927 * offset to use when looking at bytes in a
6928 *
6929 * @param {Array|Uint8Array} [options.mask=[]]
6930 * mask to use on bytes before comparison.
6931 *
6932 * @return {boolean}
6933 * If all bytes in b are inside of a, taking into account
6934 * bit masks.
6935 */
6936
6937 var bytesMatch = function bytesMatch(a, b, _temp3) {
6938 var _ref3 = _temp3 === void 0 ? {} : _temp3,
6939 _ref3$offset = _ref3.offset,
6940 offset = _ref3$offset === void 0 ? 0 : _ref3$offset,
6941 _ref3$mask = _ref3.mask,
6942 mask = _ref3$mask === void 0 ? [] : _ref3$mask;
6943
6944 a = toUint8(a);
6945 b = toUint8(b); // ie 11 does not support every on Uint8Array
6946
6947 var fn = b.every ? b.every : Array.prototype.every;
6948 return b.length && a.length - offset >= b.length && // ie 11 doesn't support every on Uint8Array
6949 fn.call(b, function (bByte, i) {
6950 var aByte = mask[i] ? mask[i] & a[offset + i] : a[offset + i];
6951 return bByte === aByte;
6952 });
6953 };
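  /*
   * Example: checking for an ID3 header at the start of a buffer; the mask is
   * only applied where a mask byte is provided:
   *
   *   bytesMatch([0x49, 0x44, 0x33, 0x04], [0x49, 0x44, 0x33]);                // => true
   *   bytesMatch([0x00, 0x49, 0x44, 0x33], [0x49, 0x44, 0x33], { offset: 1 }); // => true
   */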
6954
6955 var ID3 = toUint8([0x49, 0x44, 0x33]);
6956 var getId3Size = function getId3Size(bytes, offset) {
6957 if (offset === void 0) {
6958 offset = 0;
6959 }
6960
6961 bytes = toUint8(bytes);
6962 var flags = bytes[offset + 5];
6963 var returnSize = bytes[offset + 6] << 21 | bytes[offset + 7] << 14 | bytes[offset + 8] << 7 | bytes[offset + 9];
6964 var footerPresent = (flags & 16) >> 4;
6965
6966 if (footerPresent) {
6967 return returnSize + 20;
6968 }
6969
6970 return returnSize + 10;
6971 };
6972 var getId3Offset$1 = function getId3Offset(bytes, offset) {
6973 if (offset === void 0) {
6974 offset = 0;
6975 }
6976
6977 bytes = toUint8(bytes);
6978
6979 if (bytes.length - offset < 10 || !bytesMatch(bytes, ID3, {
6980 offset: offset
6981 })) {
6982 return offset;
6983 }
6984
6985 offset += getId3Size(bytes, offset); // recursive check for id3 tags as some files
6986 // have multiple ID3 tag sections even though
6987 // they should not.
6988
6989 return getId3Offset(bytes, offset);
6990 };
6991
6992 var normalizePath$1 = function normalizePath(path) {
6993 if (typeof path === 'string') {
6994 return stringToBytes(path);
6995 }
6996
6997 if (typeof path === 'number') {
6998 return path;
6999 }
7000
7001 return path;
7002 };
7003
7004 var normalizePaths$1 = function normalizePaths(paths) {
7005 if (!Array.isArray(paths)) {
7006 return [normalizePath$1(paths)];
7007 }
7008
7009 return paths.map(function (p) {
7010 return normalizePath$1(p);
7011 });
7012 };
7013 /**
7014 * find any number of boxes by name given a path to it in an iso bmff
7015 * such as mp4.
7016 *
7017 * @param {TypedArray} bytes
7018 * bytes for the iso bmff to search for boxes in
7019 *
7020 * @param {Uint8Array[]|string[]|string|Uint8Array} name
7021 * An array of paths or a single path representing the name
7022 * of boxes to search through in bytes. Paths may be
7023 * uint8 (character codes) or strings.
7024 *
7025 * @param {boolean} [complete=false]
7026 * Should we search only for complete boxes on the final path.
7027 * This is very useful when you do not want to get back partial boxes
7028 * in the case of streaming files.
7029 *
7030 * @return {Uint8Array[]}
7031 * An array of the end paths that we found.
7032 */
7033
7034 var findBox$1 = function findBox(bytes, paths, complete) {
7035 if (complete === void 0) {
7036 complete = false;
7037 }
7038
7039 paths = normalizePaths$1(paths);
7040 bytes = toUint8(bytes);
7041 var results = [];
7042
7043 if (!paths.length) {
7044 // short-circuit the search for empty paths
7045 return results;
7046 }
7047
7048 var i = 0;
7049
7050 while (i < bytes.length) {
7051 var size = (bytes[i] << 24 | bytes[i + 1] << 16 | bytes[i + 2] << 8 | bytes[i + 3]) >>> 0;
7052 var type = bytes.subarray(i + 4, i + 8); // invalid box format.
7053
7054 if (size === 0) {
7055 break;
7056 }
7057
7058 var end = i + size;
7059
7060 if (end > bytes.length) {
7061 // this box is bigger than the number of bytes we have; if
7062 // complete is set, we cannot find any more boxes.
7063 if (complete) {
7064 break;
7065 }
7066
7067 end = bytes.length;
7068 }
7069
7070 var data = bytes.subarray(i + 8, end);
7071
7072 if (bytesMatch(type, paths[0])) {
7073 if (paths.length === 1) {
7074 // this is the end of the path and we've found the box we were
7075 // looking for
7076 results.push(data);
7077 } else {
7078 // recursively search for the next box along the path
7079 results.push.apply(results, findBox(data, paths.slice(1), complete));
7080 }
7081 }
7082
7083 i = end;
7084 } // we've finished searching all of bytes
7085
7086
7087 return results;
7088 };
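
// Example (illustrative only): given a complete fmp4 media segment in a
// hypothetical Uint8Array `segmentBytes`, this collects the payload of
// every tfhd box nested under moof > traf:
//
//   var tfhds = findBox$1(segmentBytes, ['moof', 'traf', 'tfhd']);
//   tfhds.forEach(function (tfhd) { /* parse the track fragment header */ });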
7089
7090 // https://matroska-org.github.io/libebml/specs.html
7091 // https://www.matroska.org/technical/elements.html
7092 // https://www.webmproject.org/docs/container/
7093
7094 var EBML_TAGS = {
7095 EBML: toUint8([0x1A, 0x45, 0xDF, 0xA3]),
7096 DocType: toUint8([0x42, 0x82]),
7097 Segment: toUint8([0x18, 0x53, 0x80, 0x67]),
7098 SegmentInfo: toUint8([0x15, 0x49, 0xA9, 0x66]),
7099 Tracks: toUint8([0x16, 0x54, 0xAE, 0x6B]),
7100 Track: toUint8([0xAE]),
7101 TrackNumber: toUint8([0xd7]),
7102 DefaultDuration: toUint8([0x23, 0xe3, 0x83]),
7103 TrackEntry: toUint8([0xAE]),
7104 TrackType: toUint8([0x83]),
7105 FlagDefault: toUint8([0x88]),
7106 CodecID: toUint8([0x86]),
7107 CodecPrivate: toUint8([0x63, 0xA2]),
7108 VideoTrack: toUint8([0xe0]),
7109 AudioTrack: toUint8([0xe1]),
7110 // Not used yet, but will be used for live webm/mkv
7111 // see https://www.matroska.org/technical/basics.html#block-structure
7112 // see https://www.matroska.org/technical/basics.html#simpleblock-structure
7113 Cluster: toUint8([0x1F, 0x43, 0xB6, 0x75]),
7114 Timestamp: toUint8([0xE7]),
7115 TimestampScale: toUint8([0x2A, 0xD7, 0xB1]),
7116 BlockGroup: toUint8([0xA0]),
7117 BlockDuration: toUint8([0x9B]),
7118 Block: toUint8([0xA1]),
7119 SimpleBlock: toUint8([0xA3])
7120 };
7121 /**
7122 * This is a simple table to determine the length
7123 * of things in ebml. The length is one based (starts at 1,
7124 * rather than zero) and for every zero bit before a one bit
7125 * we add one to the length. We also need this table because in some
7126 * cases we have to xor the length bits out of another value.
7127 */
7128
7129 var LENGTH_TABLE = [128, 64, 32, 16, 8, 4, 2, 1];
7130
7131 var getLength = function getLength(byte) {
7132 var len = 1;
7133
7134 for (var i = 0; i < LENGTH_TABLE.length; i++) {
7135 if (byte & LENGTH_TABLE[i]) {
7136 break;
7137 }
7138
7139 len++;
7140 }
7141
7142 return len;
7143 }; // length in ebml is stored in the first 4 to 8 bits
7144 // of the first byte: up to 4 bits for an id length and up to 8 bits
7145 // for a data size length. The length is 1 plus the number of zero
7146 // bits before the first 1 bit in the first byte, counting from the
7147 // left.
7148
7149
7150 var getvint = function getvint(bytes, offset, removeLength, signed) {
7151 if (removeLength === void 0) {
7152 removeLength = true;
7153 }
7154
7155 if (signed === void 0) {
7156 signed = false;
7157 }
7158
7159 var length = getLength(bytes[offset]);
7160 var valueBytes = bytes.subarray(offset, offset + length); // NOTE that in the removeLength branch below we do **not** subarray, because
7161 // we need to copy those bytes: they are modified to strip the length bits
7162 // and we do not want to mutate the original data. Normally we could just
7163 // call slice on the uint8array, but ie 11 does not support that...
7164
7165 if (removeLength) {
7166 valueBytes = Array.prototype.slice.call(bytes, offset, offset + length);
7167 valueBytes[0] ^= LENGTH_TABLE[length - 1];
7168 }
7169
7170 return {
7171 length: length,
7172 value: bytesToNumber(valueBytes, {
7173 signed: signed
7174 }),
7175 bytes: valueBytes
7176 };
7177 };
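
// Example (illustrative only): 0x81 is 10000001 in binary, so its vint
// length is 1 (no leading zeros) and xor-ing out the length bit leaves 1:
//
//   getvint(toUint8([0x81]), 0);
//   // => { length: 1, value: 1, bytes: [1] }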
7178
7179 var normalizePath = function normalizePath(path) {
7180 if (typeof path === 'string') {
7181 return path.match(/.{1,2}/g).map(function (p) {
7182 return normalizePath(p);
7183 });
7184 }
7185
7186 if (typeof path === 'number') {
7187 return numberToBytes(path);
7188 }
7189
7190 return path;
7191 };
7192
7193 var normalizePaths = function normalizePaths(paths) {
7194 if (!Array.isArray(paths)) {
7195 return [normalizePath(paths)];
7196 }
7197
7198 return paths.map(function (p) {
7199 return normalizePath(p);
7200 });
7201 };
7202
7203 var getInfinityDataSize = function getInfinityDataSize(id, bytes, offset) {
7204 if (offset >= bytes.length) {
7205 return bytes.length;
7206 }
7207
7208 var innerid = getvint(bytes, offset, false);
7209
7210 if (bytesMatch(id.bytes, innerid.bytes)) {
7211 return offset;
7212 }
7213
7214 var dataHeader = getvint(bytes, offset + innerid.length);
7215 return getInfinityDataSize(id, bytes, offset + dataHeader.length + dataHeader.value + innerid.length);
7216 };
7217 /**
7218 * Notes on the EBML format.
7219 *
7220 * EBML uses "vint" tags. Every vint tag contains
7221 * two parts:
7222 *
7223 * 1. The length from the first byte. You get this by
7224 * converting the byte to binary and counting the zeros
7225 * before a 1. Then you add 1 to that. Examples:
7226 * 00011111 = length 4 because there are 3 zeros before a 1.
7227 * 00100000 = length 3 because there are 2 zeros before a 1.
7228 * 00000011 = length 7 because there are 6 zeros before a 1.
7229 *
7230 * 2. The bits used for length are removed from the first byte.
7231 * Then all the bytes are merged into a value. NOTE: this
7232 * is not the case for id ebml tags, as their id includes
7233 * the length bits.
7234 *
7235 */
7236
7237
7238 var findEbml = function findEbml(bytes, paths) {
7239 paths = normalizePaths(paths);
7240 bytes = toUint8(bytes);
7241 var results = [];
7242
7243 if (!paths.length) {
7244 return results;
7245 }
7246
7247 var i = 0;
7248
7249 while (i < bytes.length) {
7250 var id = getvint(bytes, i, false);
7251 var dataHeader = getvint(bytes, i + id.length);
7252 var dataStart = i + id.length + dataHeader.length; // dataSize is unknown or this is a live stream
7253
7254 if (dataHeader.value === 0x7f) {
7255 dataHeader.value = getInfinityDataSize(id, bytes, dataStart);
7256
7257 if (dataHeader.value !== bytes.length) {
7258 dataHeader.value -= dataStart;
7259 }
7260 }
7261
7262 var dataEnd = dataStart + dataHeader.value > bytes.length ? bytes.length : dataStart + dataHeader.value;
7263 var data = bytes.subarray(dataStart, dataEnd);
7264
7265 if (bytesMatch(paths[0], id.bytes)) {
7266 if (paths.length === 1) {
7267 // this is the end of the paths and we've found the tag we were
7268 // looking for
7269 results.push(data);
7270 } else {
7271 // recursively search for the next tag inside of the data
7272 // of this one
7273 results = results.concat(findEbml(data, paths.slice(1)));
7274 }
7275 }
7276
7277 var totalLength = id.length + dataHeader.length + data.length; // move past this tag entirely, we are not looking for it
7278
7279 i += totalLength;
7280 }
7281
7282 return results;
7283 }; // see https://www.matroska.org/technical/basics.html#block-structure
7284
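// Example (illustrative only): pull the DocType payload out of an EBML
// header to tell webm from mkv; `ebmlBytes` is a hypothetical Uint8Array
// holding the start of the file:
//
//   var docType = findEbml(ebmlBytes, [EBML_TAGS.EBML, EBML_TAGS.DocType])[0];
//   // docType contains the bytes of "webm" or "matroska"
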
7285 var NAL_TYPE_ONE = toUint8([0x00, 0x00, 0x00, 0x01]);
7286 var NAL_TYPE_TWO = toUint8([0x00, 0x00, 0x01]);
7287 var EMULATION_PREVENTION = toUint8([0x00, 0x00, 0x03]);
7288 /**
7289 * Expunge any "Emulation Prevention" bytes from a "Raw Byte
7290 * Sequence Payload"
7291 *
7292 * @param data {Uint8Array} the bytes of an RBSP from a NAL
7293 * unit
7294 * @return {Uint8Array} the RBSP without any Emulation
7295 * Prevention Bytes
7296 */
7297
7298 var discardEmulationPreventionBytes = function discardEmulationPreventionBytes(bytes) {
7299 var positions = [];
7300 var i = 1; // Find all `Emulation Prevention Bytes`
7301
7302 while (i < bytes.length - 2) {
7303 if (bytesMatch(bytes.subarray(i, i + 3), EMULATION_PREVENTION)) {
7304 positions.push(i + 2);
7305 i++;
7306 }
7307
7308 i++;
7309 } // If no Emulation Prevention Bytes were found just return the original
7310 // array
7311
7312
7313 if (positions.length === 0) {
7314 return bytes;
7315 } // Create a new array to hold the NAL unit data
7316
7317
7318 var newLength = bytes.length - positions.length;
7319 var newData = new Uint8Array(newLength);
7320 var sourceIndex = 0;
7321
7322 for (i = 0; i < newLength; sourceIndex++, i++) {
7323 if (sourceIndex === positions[0]) {
7324 // Skip this byte
7325 sourceIndex++; // Remove this position index
7326
7327 positions.shift();
7328 }
7329
7330 newData[i] = bytes[sourceIndex];
7331 }
7332
7333 return newData;
7334 };
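// Example (illustrative only): the 0x03 in a 0x00 0x00 0x03 run is an
// emulation prevention byte and is dropped:
//
//   discardEmulationPreventionBytes(toUint8([0x67, 0x00, 0x00, 0x03, 0x01]));
//   // => [0x67, 0x00, 0x00, 0x01]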
7335 var findNal = function findNal(bytes, dataType, types, nalLimit) {
7336 if (nalLimit === void 0) {
7337 nalLimit = Infinity;
7338 }
7339
7340 bytes = toUint8(bytes);
7341 types = [].concat(types);
7342 var i = 0;
7343 var nalStart;
7344 var nalsFound = 0; // keep searching until:
7345 // we reach the end of bytes
7346 // we reach the maximum number of nals they want to search
7347 // NOTE that we disregard nalLimit once we have found the start
7348 // of the nal we want, so that we can also find its end.
7349
7350 while (i < bytes.length && (nalsFound < nalLimit || nalStart)) {
7351 var nalOffset = void 0;
7352
7353 if (bytesMatch(bytes.subarray(i), NAL_TYPE_ONE)) {
7354 nalOffset = 4;
7355 } else if (bytesMatch(bytes.subarray(i), NAL_TYPE_TWO)) {
7356 nalOffset = 3;
7357 } // we are unsynced,
7358 // find the next nal unit
7359
7360
7361 if (!nalOffset) {
7362 i++;
7363 continue;
7364 }
7365
7366 nalsFound++;
7367
7368 if (nalStart) {
7369 return discardEmulationPreventionBytes(bytes.subarray(nalStart, i));
7370 }
7371
7372 var nalType = void 0;
7373
7374 if (dataType === 'h264') {
7375 nalType = bytes[i + nalOffset] & 0x1f;
7376 } else if (dataType === 'h265') {
7377 nalType = bytes[i + nalOffset] >> 1 & 0x3f;
7378 }
7379
7380 if (types.indexOf(nalType) !== -1) {
7381 nalStart = i + nalOffset;
7382 } // the nal header is 1 byte long for h264, and 2 bytes for h265
7383
7384
7385 i += nalOffset + (dataType === 'h264' ? 1 : 2);
7386 }
7387
7388 return bytes.subarray(0, 0);
7389 };
7390 var findH264Nal = function findH264Nal(bytes, type, nalLimit) {
7391 return findNal(bytes, 'h264', type, nalLimit);
7392 };
7393 var findH265Nal = function findH265Nal(bytes, type, nalLimit) {
7394 return findNal(bytes, 'h265', type, nalLimit);
7395 };
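
// Example (illustrative only): scan the first few NAL units of an annex-b
// h264 stream for a sequence parameter set (nal_unit_type 7); `h264Bytes`
// is a hypothetical Uint8Array:
//
//   var sps = findH264Nal(h264Bytes, 7, 3);
//   // sps.length is 0 if no SPS was found in the first 3 nals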
7396
7397 var CONSTANTS = {
7398 // "webm" string literal in hex
7399 'webm': toUint8([0x77, 0x65, 0x62, 0x6d]),
7400 // "matroska" string literal in hex
7401 'matroska': toUint8([0x6d, 0x61, 0x74, 0x72, 0x6f, 0x73, 0x6b, 0x61]),
7402 // "fLaC" string literal in hex
7403 'flac': toUint8([0x66, 0x4c, 0x61, 0x43]),
7404 // "OggS" string literal in hex
7405 'ogg': toUint8([0x4f, 0x67, 0x67, 0x53]),
7406 // ac-3 sync byte, also works for ec-3 as that is simply
7407 // an extension of ac-3
7408 'ac3': toUint8([0x0b, 0x77]),
7409 // "RIFF" string literal in hex used for wav and avi
7410 'riff': toUint8([0x52, 0x49, 0x46, 0x46]),
7411 // "AVI" string literal in hex
7412 'avi': toUint8([0x41, 0x56, 0x49]),
7413 // "WAVE" string literal in hex
7414 'wav': toUint8([0x57, 0x41, 0x56, 0x45]),
7415 // "ftyp3g" string literal in hex
7416 '3gp': toUint8([0x66, 0x74, 0x79, 0x70, 0x33, 0x67]),
7417 // "ftyp" string literal in hex
7418 'mp4': toUint8([0x66, 0x74, 0x79, 0x70]),
7419 // "styp" string literal in hex
7420 'fmp4': toUint8([0x73, 0x74, 0x79, 0x70]),
7421 // "ftyp" string literal in hex
7422 'mov': toUint8([0x66, 0x74, 0x79, 0x70, 0x71, 0x74])
7423 };
7424 var _isLikely = {
7425 aac: function aac(bytes) {
7426 var offset = getId3Offset$1(bytes);
7427 return bytesMatch(bytes, [0xFF, 0x10], {
7428 offset: offset,
7429 mask: [0xFF, 0x16]
7430 });
7431 },
7432 mp3: function mp3(bytes) {
7433 var offset = getId3Offset$1(bytes);
7434 return bytesMatch(bytes, [0xFF, 0x02], {
7435 offset: offset,
7436 mask: [0xFF, 0x06]
7437 });
7438 },
7439 webm: function webm(bytes) {
7440 var docType = findEbml(bytes, [EBML_TAGS.EBML, EBML_TAGS.DocType])[0]; // check if DocType EBML tag is webm
7441
7442 return bytesMatch(docType, CONSTANTS.webm);
7443 },
7444 mkv: function mkv(bytes) {
7445 var docType = findEbml(bytes, [EBML_TAGS.EBML, EBML_TAGS.DocType])[0]; // check if DocType EBML tag is matroska
7446
7447 return bytesMatch(docType, CONSTANTS.matroska);
7448 },
7449 mp4: function mp4(bytes) {
7450 return !_isLikely['3gp'](bytes) && !_isLikely.mov(bytes) && (bytesMatch(bytes, CONSTANTS.mp4, {
7451 offset: 4
7452 }) || bytesMatch(bytes, CONSTANTS.fmp4, {
7453 offset: 4
7454 }));
7455 },
7456 mov: function mov(bytes) {
7457 return bytesMatch(bytes, CONSTANTS.mov, {
7458 offset: 4
7459 });
7460 },
7461 '3gp': function gp(bytes) {
7462 return bytesMatch(bytes, CONSTANTS['3gp'], {
7463 offset: 4
7464 });
7465 },
7466 ac3: function ac3(bytes) {
7467 var offset = getId3Offset$1(bytes);
7468 return bytesMatch(bytes, CONSTANTS.ac3, {
7469 offset: offset
7470 });
7471 },
7472 ts: function ts(bytes) {
7473 if (bytes.length < 189 && bytes.length >= 1) {
7474 return bytes[0] === 0x47;
7475 }
7476
7477 var i = 0; // check the first 376 bytes for two matching sync bytes
7478
7479 while (i + 188 < bytes.length && i < 188) {
7480 if (bytes[i] === 0x47 && bytes[i + 188] === 0x47) {
7481 return true;
7482 }
7483
7484 i += 1;
7485 }
7486
7487 return false;
7488 },
7489 flac: function flac(bytes) {
7490 var offset = getId3Offset$1(bytes);
7491 return bytesMatch(bytes, CONSTANTS.flac, {
7492 offset: offset
7493 });
7494 },
7495 ogg: function ogg(bytes) {
7496 return bytesMatch(bytes, CONSTANTS.ogg);
7497 },
7498 avi: function avi(bytes) {
7499 return bytesMatch(bytes, CONSTANTS.riff) && bytesMatch(bytes, CONSTANTS.avi, {
7500 offset: 8
7501 });
7502 },
7503 wav: function wav(bytes) {
7504 return bytesMatch(bytes, CONSTANTS.riff) && bytesMatch(bytes, CONSTANTS.wav, {
7505 offset: 8
7506 });
7507 },
7508 'h264': function h264(bytes) {
7509 // find seq_parameter_set_rbsp
7510 return findH264Nal(bytes, 7, 3).length;
7511 },
7512 'h265': function h265(bytes) {
7513 // find video_parameter_set_rbsp or seq_parameter_set_rbsp
7514 return findH265Nal(bytes, [32, 33], 3).length;
7515 }
7516 }; // get all the isLikely functions,
7517 // but make sure 'ts' is checked after everything else and before
7518 // h264 and h265, as these are the least specific checks
7519
7520 var isLikelyTypes = Object.keys(_isLikely) // remove ts, h264, h265
7521 .filter(function (t) {
7522 return t !== 'ts' && t !== 'h264' && t !== 'h265';
7523 }) // add it back to the bottom
7524 .concat(['ts', 'h264', 'h265']); // make sure we are dealing with uint8 data.
7525
7526 isLikelyTypes.forEach(function (type) {
7527 var isLikelyFn = _isLikely[type];
7528
7529 _isLikely[type] = function (bytes) {
7530 return isLikelyFn(toUint8(bytes));
7531 };
7532 }); // export after wrapping
7533
7534 var isLikely = _isLikely; // A useful list of file signatures can be found here
7535 // https://en.wikipedia.org/wiki/List_of_file_signatures
7536
7537 var detectContainerForBytes = function detectContainerForBytes(bytes) {
7538 bytes = toUint8(bytes);
7539
7540 for (var i = 0; i < isLikelyTypes.length; i++) {
7541 var type = isLikelyTypes[i];
7542
7543 if (isLikely[type](bytes)) {
7544 return type;
7545 }
7546 }
7547
7548 return '';
7549 }; // fmp4 is not a container
7550
7551 var isLikelyFmp4MediaSegment = function isLikelyFmp4MediaSegment(bytes) {
7552 return findBox$1(bytes, ['moof']).length > 0;
7553 };
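
// Example (illustrative only): an mp4 begins with a 4-byte box size
// followed by "ftyp" at byte offset 4:
//
//   detectContainerForBytes(toUint8([
//     0x00, 0x00, 0x00, 0x18, 0x66, 0x74, 0x79, 0x70 // ....ftyp
//   ])); // => 'mp4'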
7554
7555 // invoke the callback only once the request is complete (readyState 4).
7556
7557 var callbackOnCompleted = function callbackOnCompleted(request, cb) {
7558 if (request.readyState === 4) {
7559 return cb();
7560 }
7561
7562 return;
7563 };
7564
7565 var containerRequest = function containerRequest(uri, xhr, cb) {
7566 var bytes = [];
7567 var id3Offset;
7568 var finished = false;
7569
7570 var endRequestAndCallback = function endRequestAndCallback(err, req, type, _bytes) {
7571 req.abort();
7572 finished = true;
7573 return cb(err, req, type, _bytes);
7574 };
7575
7576 var progressListener = function progressListener(error, request) {
7577 if (finished) {
7578 return;
7579 }
7580
7581 if (error) {
7582 return endRequestAndCallback(error, request, '', bytes);
7583 } // grab the new part of content that was just downloaded
7584
7585
7586 var newPart = request.responseText.substring(bytes && bytes.byteLength || 0, request.responseText.length); // add that onto bytes
7587
7588 bytes = concatTypedArrays(bytes, stringToBytes(newPart, true));
7589 id3Offset = id3Offset || getId3Offset$1(bytes); // we need at least 10 bytes to determine a type
7590 // or we need at least two bytes after an id3Offset
7591
7592 if (bytes.length < 10 || id3Offset && bytes.length < id3Offset + 2) {
7593 return callbackOnCompleted(request, function () {
7594 return endRequestAndCallback(error, request, '', bytes);
7595 });
7596 }
7597
7598 var type = detectContainerForBytes(bytes); // if this looks like a ts segment but we don't have enough data
7599 // to see the second sync byte, wait until we have enough data
7600 // before declaring it ts
7601
7602 if (type === 'ts' && bytes.length < 188) {
7603 return callbackOnCompleted(request, function () {
7604 return endRequestAndCallback(error, request, '', bytes);
7605 });
7606 } // this may be an unsynced ts segment
7607 // wait for 376 bytes before detecting no container
7608
7609
7610 if (!type && bytes.length < 376) {
7611 return callbackOnCompleted(request, function () {
7612 return endRequestAndCallback(error, request, '', bytes);
7613 });
7614 }
7615
7616 return endRequestAndCallback(null, request, type, bytes);
7617 };
7618
7619 var options = {
7620 uri: uri,
7621 beforeSend: function beforeSend(request) {
7622 // this forces the browser to pass the bytes to us unprocessed
7623 request.overrideMimeType('text/plain; charset=x-user-defined');
7624 request.addEventListener('progress', function (_ref) {
7625 _ref.total; // total and loaded are unused; these bare expressions are
7626 _ref.loaded; // left over from compiled parameter destructuring
7627 return callbackWrapper(request, null, {
7628 statusCode: request.status
7629 }, progressListener);
7630 });
7631 }
7632 };
7633 var request = xhr(options, function (error, response) {
7634 return callbackWrapper(request, error, response, progressListener);
7635 });
7636 return request;
7637 };
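
// Example (illustrative only): probe a segment URL for its container type
// before picking a demuxer; `segmentUri` and `vhs.xhr` stand in for a real
// URL and the video.js xhr function:
//
//   containerRequest(segmentUri, vhs.xhr, function (err, req, type, bytes) {
//     if (!err && type === 'ts') { /* route to the mpeg-ts transmuxer */ }
//   });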
7638
7639 var EventTarget = videojs__default['default'].EventTarget,
7640 mergeOptions = videojs__default['default'].mergeOptions;
7641
7642 var dashPlaylistUnchanged = function dashPlaylistUnchanged(a, b) {
7643 if (!isPlaylistUnchanged(a, b)) {
7644 return false;
7645 } // for dash the above check will often return true even when the
7646 // playlist has actually changed, because mediaSequence isn't a dash
7647 // concept and we often set it to 1, so playlists with the same number
7648 // of segments look unchanged. For dash we therefore have to make sure
7649 // that the underlying segments are the same.
7650 // if the sidx changed then the playlists are different.
7651
7652
7653 if (a.sidx && b.sidx && (a.sidx.offset !== b.sidx.offset || a.sidx.length !== b.sidx.length)) {
7654 return false;
7655 } else if (!a.sidx && b.sidx || a.sidx && !b.sidx) {
7656 return false;
7657 } // one or the other does not have segments
7658 // there was a change.
7659
7660
7661 if (a.segments && !b.segments || !a.segments && b.segments) {
7662 return false;
7663 } // neither has segments nothing changed
7664
7665
7666 if (!a.segments && !b.segments) {
7667 return true;
7668 } // check segments themselves
7669
7670
7671 for (var i = 0; i < a.segments.length; i++) {
7672 var aSegment = a.segments[i];
7673 var bSegment = b.segments[i]; // if uris are different between segments there was a change
7674
7675 if (aSegment.uri !== bSegment.uri) {
7676 return false;
7677 } // neither segment has a byterange, there will be no byterange change.
7678
7679
7680 if (!aSegment.byterange && !bSegment.byterange) {
7681 continue;
7682 }
7683
7684 var aByterange = aSegment.byterange;
7685 var bByterange = bSegment.byterange; // if byterange only exists on one of the segments, there was a change.
7686
7687 if (aByterange && !bByterange || !aByterange && bByterange) {
7688 return false;
7689 } // if both segments have byterange with different offsets, there was a change.
7690
7691
7692 if (aByterange.offset !== bByterange.offset || aByterange.length !== bByterange.length) {
7693 return false;
7694 }
7695 } // if everything was the same with segments, this is the same playlist.
7696
7697
7698 return true;
7699 };
7700 /**
7701 * Parses the master XML string and updates playlist URI references.
7702 *
7703 * @param {Object} config
7704 * Object of arguments
7705 * @param {string} config.masterXml
7706 * The mpd XML
7707 * @param {string} config.srcUrl
7708 * The mpd URL
7709 * @param {Date} config.clientOffset
7710 * A time difference between server and client
7711 * @param {Object} config.sidxMapping
7712 * SIDX mappings for moof/mdat URIs and byte ranges
7713 * @return {Object}
7714 * The parsed mpd manifest object
7715 */
7716
7717
7718 var parseMasterXml = function parseMasterXml(_ref) {
7719 var masterXml = _ref.masterXml,
7720 srcUrl = _ref.srcUrl,
7721 clientOffset = _ref.clientOffset,
7722 sidxMapping = _ref.sidxMapping;
7723 var master = parse(masterXml, {
7724 manifestUri: srcUrl,
7725 clientOffset: clientOffset,
7726 sidxMapping: sidxMapping
7727 });
7728 addPropertiesToMaster(master, srcUrl);
7729 return master;
7730 };
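
// Example (illustrative only): parse a fetched MPD string into a master
// manifest object; `mpdXml` and `mpdUrl` are hypothetical values:
//
//   var master = parseMasterXml({
//     masterXml: mpdXml,
//     srcUrl: mpdUrl,
//     clientOffset: 0,
//     sidxMapping: {}
//   });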
7731 /**
7732 * Returns a new master manifest that is the result of merging an updated master manifest
7733 * into the original version.
7734 *
7735 * @param {Object} oldMaster
7736 * The old parsed mpd object
7737 * @param {Object} newMaster
7738 * The updated parsed mpd object
7739 * @return {Object}
7740 * A new object representing the original master manifest with the updated media
7741 * playlists merged in
7742 */
7743
7744 var updateMaster = function updateMaster(oldMaster, newMaster, sidxMapping) {
7745 var noChanges = true;
7746 var update = mergeOptions(oldMaster, {
7747 // These are top level properties that can be updated
7748 duration: newMaster.duration,
7749 minimumUpdatePeriod: newMaster.minimumUpdatePeriod
7750 }); // First update the playlists in playlist list
7751
7752 for (var i = 0; i < newMaster.playlists.length; i++) {
7753 var playlist = newMaster.playlists[i];
7754
7755 if (playlist.sidx) {
7756 var sidxKey = generateSidxKey(playlist.sidx);
7757
7758 if (sidxMapping && sidxMapping[sidxKey]) {
7759 addSidxSegmentsToPlaylist(playlist, sidxMapping[sidxKey].sidx, playlist.sidx.resolvedUri);
7760 }
7761 }
7762
7763 var playlistUpdate = updateMaster$1(update, playlist, dashPlaylistUnchanged);
7764
7765 if (playlistUpdate) {
7766 update = playlistUpdate;
7767 noChanges = false;
7768 }
7769 } // Then update media group playlists
7770
7771
7772 forEachMediaGroup(newMaster, function (properties, type, group, label) {
7773 if (properties.playlists && properties.playlists.length) {
7774 var id = properties.playlists[0].id;
7775
7776 var _playlistUpdate = updateMaster$1(update, properties.playlists[0], dashPlaylistUnchanged);
7777
7778 if (_playlistUpdate) {
7779 update = _playlistUpdate; // update the playlist reference within media groups
7780
7781 update.mediaGroups[type][group][label].playlists[0] = update.playlists[id];
7782 noChanges = false;
7783 }
7784 }
7785 });
7786
7787 if (newMaster.minimumUpdatePeriod !== oldMaster.minimumUpdatePeriod) {
7788 noChanges = false;
7789 }
7790
7791 if (noChanges) {
7792 return null;
7793 }
7794
7795 return update;
7796 }; // SIDX should be equivalent if the URI and byteranges of the SIDX match.
7797 // If the SIDXs have maps, the two maps should match,
7798 // and if both `a` and `b` are missing SIDXs they are considered matching.
7799 // If `a` or `b`, but not both, has a map, they aren't matching.
7800
7801 var equivalentSidx = function equivalentSidx(a, b) {
7802 var neitherMap = Boolean(!a.map && !b.map);
7803 var equivalentMap = neitherMap || Boolean(a.map && b.map && a.map.byterange.offset === b.map.byterange.offset && a.map.byterange.length === b.map.byterange.length);
7804 return equivalentMap && a.uri === b.uri && a.byterange.offset === b.byterange.offset && a.byterange.length === b.byterange.length;
7805 }; // exported for testing
7806
7807
7808 var compareSidxEntry = function compareSidxEntry(playlists, oldSidxMapping) {
7809 var newSidxMapping = {};
7810
7811 for (var id in playlists) {
7812 var playlist = playlists[id];
7813 var currentSidxInfo = playlist.sidx;
7814
7815 if (currentSidxInfo) {
7816 var key = generateSidxKey(currentSidxInfo);
7817
7818 if (!oldSidxMapping[key]) {
7819 break;
7820 }
7821
7822 var savedSidxInfo = oldSidxMapping[key].sidxInfo;
7823
7824 if (equivalentSidx(savedSidxInfo, currentSidxInfo)) {
7825 newSidxMapping[key] = oldSidxMapping[key];
7826 }
7827 }
7828 }
7829
7830 return newSidxMapping;
7831 };
7832 /**
7833 * A function that filters out changed items as they need to be requested separately.
7834 *
7835 * The method is exported for testing
7836 *
7837 * @param {Object} master the parsed mpd XML returned via mpd-parser
7838 * @param {Object} oldSidxMapping the SIDX to compare against
7839 */
7840
7841 var filterChangedSidxMappings = function filterChangedSidxMappings(master, oldSidxMapping) {
7842 var videoSidx = compareSidxEntry(master.playlists, oldSidxMapping);
7843 var mediaGroupSidx = videoSidx;
7844 forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
7845 if (properties.playlists && properties.playlists.length) {
7846 var playlists = properties.playlists;
7847 mediaGroupSidx = mergeOptions(mediaGroupSidx, compareSidxEntry(playlists, oldSidxMapping));
7848 }
7849 });
7850 return mediaGroupSidx;
7851 };
7852
7853 var DashPlaylistLoader = /*#__PURE__*/function (_EventTarget) {
7854 inheritsLoose(DashPlaylistLoader, _EventTarget);
7855
7856 // DashPlaylistLoader must accept either a src url or a playlist because subsequent
7857 // playlist loader setups from media groups will expect to be able to pass a playlist
7858 // (since there aren't external URLs to media playlists with DASH)
7859 function DashPlaylistLoader(srcUrlOrPlaylist, vhs, options, masterPlaylistLoader) {
7860 var _this;
7861
7862 if (options === void 0) {
7863 options = {};
7864 }
7865
7866 _this = _EventTarget.call(this) || this;
7867 _this.masterPlaylistLoader_ = masterPlaylistLoader || assertThisInitialized(_this);
7868
7869 if (!masterPlaylistLoader) {
7870 _this.isMaster_ = true;
7871 }
7872
7873 var _options = options,
7874 _options$withCredenti = _options.withCredentials,
7875 withCredentials = _options$withCredenti === void 0 ? false : _options$withCredenti,
7876 _options$handleManife = _options.handleManifestRedirects,
7877 handleManifestRedirects = _options$handleManife === void 0 ? false : _options$handleManife;
7878 _this.vhs_ = vhs;
7879 _this.withCredentials = withCredentials;
7880 _this.handleManifestRedirects = handleManifestRedirects;
7881
7882 if (!srcUrlOrPlaylist) {
7883 throw new Error('A non-empty playlist URL or object is required');
7884 } // event naming?
7885
7886
7887 _this.on('minimumUpdatePeriod', function () {
7888 _this.refreshXml_();
7889 }); // live playlist staleness timeout
7890
7891
7892 _this.on('mediaupdatetimeout', function () {
7893 _this.refreshMedia_(_this.media().id);
7894 });
7895
7896 _this.state = 'HAVE_NOTHING';
7897 _this.loadedPlaylists_ = {};
7898 _this.logger_ = logger('DashPlaylistLoader'); // initialize the loader state
7899 // The masterPlaylistLoader will be created with a string
7900
7901 if (_this.isMaster_) {
7902 _this.masterPlaylistLoader_.srcUrl = srcUrlOrPlaylist; // TODO: reset sidxMapping between period changes
7903 // once multi-period is refactored
7904
7905 _this.masterPlaylistLoader_.sidxMapping_ = {};
7906 } else {
7907 _this.childPlaylist_ = srcUrlOrPlaylist;
7908 }
7909
7910 return _this;
7911 }
7912
7913 var _proto = DashPlaylistLoader.prototype;
7914
7915 _proto.requestErrored_ = function requestErrored_(err, request, startingState) {
7916 // disposed
7917 if (!this.request) {
7918 return true;
7919 } // pending request is cleared
7920
7921
7922 this.request = null;
7923
7924 if (err) {
7925 // use the provided error object or create one
7926 // based on the request/response
7927 this.error = typeof err === 'object' && !(err instanceof Error) ? err : {
7928 status: request.status,
7929 message: 'DASH request error at URL: ' + request.uri,
7930 response: request.response,
7931 // MEDIA_ERR_NETWORK
7932 code: 2
7933 };
7934
7935 if (startingState) {
7936 this.state = startingState;
7937 }
7938
7939 this.trigger('error');
7940 return true;
7941 }
7942 }
7943 /**
7944 * Verify that the container of the sidx segment can be parsed
7945 * and if it can, get and parse that segment.
7946 */
7947 ;
7948
7949 _proto.addSidxSegments_ = function addSidxSegments_(playlist, startingState, cb) {
7950 var _this2 = this;
7951
7952 var sidxKey = playlist.sidx && generateSidxKey(playlist.sidx); // playlist lacks sidx or sidx segments were added to this playlist already.
7953
7954 if (!playlist.sidx || !sidxKey || this.masterPlaylistLoader_.sidxMapping_[sidxKey]) {
7955 // keep this function async
7956 this.mediaRequest_ = window__default['default'].setTimeout(function () {
7957 return cb(false);
7958 }, 0);
7959 return;
7960 } // resolve the segment URL relative to the playlist
7961
7962
7963 var uri = resolveManifestRedirect(this.handleManifestRedirects, playlist.sidx.resolvedUri);
7964
7965 var fin = function fin(err, request) {
7966 if (_this2.requestErrored_(err, request, startingState)) {
7967 return;
7968 }
7969
7970 var sidxMapping = _this2.masterPlaylistLoader_.sidxMapping_;
7971 var sidx;
7972
7973 try {
7974 sidx = parseSidx_1(toUint8(request.response).subarray(8));
7975 } catch (e) {
7976 // sidx parsing failed.
7977 _this2.requestErrored_(e, request, startingState);
7978
7979 return;
7980 }
7981
7982 sidxMapping[sidxKey] = {
7983 sidxInfo: playlist.sidx,
7984 sidx: sidx
7985 };
7986 addSidxSegmentsToPlaylist(playlist, sidx, playlist.sidx.resolvedUri);
7987 return cb(true);
7988 };
7989
7990 this.request = containerRequest(uri, this.vhs_.xhr, function (err, request, container, bytes) {
7991 if (err) {
7992 return fin(err, request);
7993 }
7994
7995 if (!container || container !== 'mp4') {
7996 return fin({
7997 status: request.status,
7998 message: "Unsupported " + (container || 'unknown') + " container type for sidx segment at URL: " + uri,
7999 // response is just bytes in this case
8000 // but we really don't want to return that.
8001 response: '',
8002 playlist: playlist,
8003 internal: true,
8004 blacklistDuration: Infinity,
8005 // MEDIA_ERR_NETWORK
8006 code: 2
8007 }, request);
8008 } // if we already downloaded the sidx bytes in the container request, use them
8009
8010
8011 var _playlist$sidx$bytera = playlist.sidx.byterange,
8012 offset = _playlist$sidx$bytera.offset,
8013 length = _playlist$sidx$bytera.length;
8014
8015 if (bytes.length >= length + offset) {
8016 return fin(err, {
8017 response: bytes.subarray(offset, offset + length),
8018 status: request.status,
8019 uri: request.uri
8020 });
8021 } // otherwise request sidx bytes
8022
8023
8024 _this2.request = _this2.vhs_.xhr({
8025 uri: uri,
8026 responseType: 'arraybuffer',
8027 headers: segmentXhrHeaders({
8028 byterange: playlist.sidx.byterange
8029 })
8030 }, fin);
8031 });
8032 };
8033
8034 _proto.dispose = function dispose() {
8035 this.trigger('dispose');
8036 this.stopRequest();
8037 this.loadedPlaylists_ = {};
8038 window__default['default'].clearTimeout(this.minimumUpdatePeriodTimeout_);
8039 window__default['default'].clearTimeout(this.mediaRequest_);
8040 window__default['default'].clearTimeout(this.mediaUpdateTimeout);
8041 this.mediaUpdateTimeout = null;
8042 this.mediaRequest_ = null;
8043 this.minimumUpdatePeriodTimeout_ = null;
8044
8045 if (this.masterPlaylistLoader_.createMupOnMedia_) {
8046 this.off('loadedmetadata', this.masterPlaylistLoader_.createMupOnMedia_);
8047 this.masterPlaylistLoader_.createMupOnMedia_ = null;
8048 }
8049
8050 this.off();
8051 };
8052
8053 _proto.hasPendingRequest = function hasPendingRequest() {
8054 return this.request || this.mediaRequest_;
8055 };
8056
8057 _proto.stopRequest = function stopRequest() {
8058 if (this.request) {
8059 var oldRequest = this.request;
8060 this.request = null;
8061 oldRequest.onreadystatechange = null;
8062 oldRequest.abort();
8063 }
8064 };
8065
8066 _proto.media = function media(playlist) {
8067 var _this3 = this;
8068
8069 // getter
8070 if (!playlist) {
8071 return this.media_;
8072 } // setter
8073
8074
8075 if (this.state === 'HAVE_NOTHING') {
8076 throw new Error('Cannot switch media playlist from ' + this.state);
8077 }
8078
8079 var startingState = this.state; // find the playlist object if the target playlist has been specified by URI
8080
8081 if (typeof playlist === 'string') {
8082 if (!this.masterPlaylistLoader_.master.playlists[playlist]) {
8083 throw new Error('Unknown playlist URI: ' + playlist);
8084 }
8085
8086 playlist = this.masterPlaylistLoader_.master.playlists[playlist];
8087 }
8088
8089 var mediaChange = !this.media_ || playlist.id !== this.media_.id; // switch to previously loaded playlists immediately
8090
8091 if (mediaChange && this.loadedPlaylists_[playlist.id] && this.loadedPlaylists_[playlist.id].endList) {
8092 this.state = 'HAVE_METADATA';
8093 this.media_ = playlist; // trigger media change if the active media has been updated
8094
8095 if (mediaChange) {
8096 this.trigger('mediachanging');
8097 this.trigger('mediachange');
8098 }
8099
8100 return;
8101 } // switching to the active playlist is a no-op
8102
8103
8104 if (!mediaChange) {
8105 return;
8106 } // switching from an already loaded playlist
8107
8108
8109 if (this.media_) {
8110 this.trigger('mediachanging');
8111 }
8112
8113 this.addSidxSegments_(playlist, startingState, function (sidxChanged) {
8114 // everything is ready just continue to haveMetadata
8115 _this3.haveMetadata({
8116 startingState: startingState,
8117 playlist: playlist
8118 });
8119 });
8120 };
8121
8122 _proto.haveMetadata = function haveMetadata(_ref2) {
8123 var startingState = _ref2.startingState,
8124 playlist = _ref2.playlist;
8125 this.state = 'HAVE_METADATA';
8126 this.loadedPlaylists_[playlist.id] = playlist;
8127 this.mediaRequest_ = null; // This will trigger loadedplaylist
8128
8129 this.refreshMedia_(playlist.id); // fire loadedmetadata the first time a media playlist is loaded
8130 // to resolve setup of media groups
8131
8132 if (startingState === 'HAVE_MASTER') {
8133 this.trigger('loadedmetadata');
8134 } else {
8135 // trigger media change if the active media has been updated
8136 this.trigger('mediachange');
8137 }
8138 };
8139
8140 _proto.pause = function pause() {
8141 if (this.masterPlaylistLoader_.createMupOnMedia_) {
8142 this.off('loadedmetadata', this.masterPlaylistLoader_.createMupOnMedia_);
8143 this.masterPlaylistLoader_.createMupOnMedia_ = null;
8144 }
8145
8146 this.stopRequest();
8147 window__default['default'].clearTimeout(this.mediaUpdateTimeout);
8148 window__default['default'].clearTimeout(this.masterPlaylistLoader_.minimumUpdatePeriodTimeout_);
8149 this.masterPlaylistLoader_.minimumUpdatePeriodTimeout_ = null;
8150 this.mediaUpdateTimeout = null;
8151
8152 if (this.state === 'HAVE_NOTHING') {
8153 // If we pause the loader before any data has been retrieved, it's as if we never
8154 // started, so reset to an unstarted state.
8155 this.started = false;
8156 }
8157 };
8158
8159 _proto.load = function load(isFinalRendition) {
8160 var _this4 = this;
8161
8162 window__default['default'].clearTimeout(this.mediaUpdateTimeout);
8163 this.mediaUpdateTimeout = null;
8164 var media = this.media();
8165
8166 if (isFinalRendition) {
8167 var delay = media ? media.targetDuration / 2 * 1000 : 5 * 1000;
8168 this.mediaUpdateTimeout = window__default['default'].setTimeout(function () {
8169 return _this4.load();
8170 }, delay);
8171 return;
8172 } // because the playlists are internal to the manifest, load should either load the
8173 // main manifest, or do nothing but trigger an event
8174
8175
8176 if (!this.started) {
8177 this.start();
8178 return;
8179 }
8180
8181 if (media && !media.endList) {
8182 this.trigger('mediaupdatetimeout');
8183 } else {
8184 this.trigger('loadedplaylist');
8185 }
8186 };
8187
8188 _proto.start = function start() {
8189 var _this5 = this;
8190
8191 this.started = true; // We don't need to request the master manifest again
8192 // Call this asynchronously to match the xhr request behavior below
8193
8194 if (!this.isMaster_) {
8195 this.mediaRequest_ = window__default['default'].setTimeout(function () {
8196 return _this5.haveMaster_();
8197 }, 0);
8198 return;
8199 }
8200
8201 this.requestMaster_(function (req, masterChanged) {
8202 _this5.haveMaster_();
8203
8204 if (!_this5.hasPendingRequest() && !_this5.media_) {
8205 _this5.media(_this5.masterPlaylistLoader_.master.playlists[0]);
8206 }
8207 });
8208 };
8209
8210 _proto.requestMaster_ = function requestMaster_(cb) {
8211 var _this6 = this;
8212
8213 this.request = this.vhs_.xhr({
8214 uri: this.masterPlaylistLoader_.srcUrl,
8215 withCredentials: this.withCredentials
8216 }, function (error, req) {
8217 if (_this6.requestErrored_(error, req)) {
8218 if (_this6.state === 'HAVE_NOTHING') {
8219 _this6.started = false;
8220 }
8221
8222 return;
8223 }
8224
8225 var masterChanged = req.responseText !== _this6.masterPlaylistLoader_.masterXml_;
8226 _this6.masterPlaylistLoader_.masterXml_ = req.responseText;
8227
8228 if (req.responseHeaders && req.responseHeaders.date) {
8229 _this6.masterLoaded_ = Date.parse(req.responseHeaders.date);
8230 } else {
8231 _this6.masterLoaded_ = Date.now();
8232 }
8233
8234 _this6.masterPlaylistLoader_.srcUrl = resolveManifestRedirect(_this6.handleManifestRedirects, _this6.masterPlaylistLoader_.srcUrl, req);
8235
8236 if (masterChanged) {
8237 _this6.handleMaster_();
8238
8239 _this6.syncClientServerClock_(function () {
8240 return cb(req, masterChanged);
8241 });
8242
8243 return;
8244 }
8245
8246 return cb(req, masterChanged);
8247 });
8248 }
8249 /**
8250 * Parses the master xml for UTCTiming node to sync the client clock to the server
8251 * clock. If the UTCTiming node requires a HEAD or GET request, that request is made.
8252 *
8253 * @param {Function} done
8254 * Function to call when clock sync has completed
8255 */
8256 ;
8257
8258 _proto.syncClientServerClock_ = function syncClientServerClock_(done) {
8259 var _this7 = this;
8260
8261 var utcTiming = parseUTCTiming(this.masterPlaylistLoader_.masterXml_); // No UTCTiming element found in the mpd. Use Date header from mpd request as the
8262 // server clock
8263
8264 if (utcTiming === null) {
8265 this.masterPlaylistLoader_.clientOffset_ = this.masterLoaded_ - Date.now();
8266 return done();
8267 }
8268
8269 if (utcTiming.method === 'DIRECT') {
8270 this.masterPlaylistLoader_.clientOffset_ = utcTiming.value - Date.now();
8271 return done();
8272 }
8273
8274 this.request = this.vhs_.xhr({
8275 uri: resolveUrl(this.masterPlaylistLoader_.srcUrl, utcTiming.value),
8276 method: utcTiming.method,
8277 withCredentials: this.withCredentials
8278 }, function (error, req) {
8279 // disposed
8280 if (!_this7.request) {
8281 return;
8282 }
8283
8284 if (error) {
8285 // sync request failed, fall back to using date header from mpd
8286 // TODO: log warning
8287 _this7.masterPlaylistLoader_.clientOffset_ = _this7.masterLoaded_ - Date.now();
8288 return done();
8289 }
8290
8291 var serverTime;
8292
8293 if (utcTiming.method === 'HEAD') {
8294 if (!req.responseHeaders || !req.responseHeaders.date) {
8295 // expected date header not present, fall back to using date header from mpd
8296 // TODO: log warning
8297 serverTime = _this7.masterLoaded_;
8298 } else {
8299 serverTime = Date.parse(req.responseHeaders.date);
8300 }
8301 } else {
8302 serverTime = Date.parse(req.responseText);
8303 }
8304
8305 _this7.masterPlaylistLoader_.clientOffset_ = serverTime - Date.now();
8306 done();
8307 });
8308 };
8309
8310 _proto.haveMaster_ = function haveMaster_() {
8311 this.state = 'HAVE_MASTER';
8312
8313 if (this.isMaster_) {
8314 // We have the master playlist at this point, so
8315 // trigger this to allow MasterPlaylistController
8316 // to make an initial playlist selection
8317 this.trigger('loadedplaylist');
8318 } else if (!this.media_) {
8319 // no media playlist was specifically selected so select
8320 // the one the child playlist loader was created with
8321 this.media(this.childPlaylist_);
8322 }
8323 };
8324
8325 _proto.handleMaster_ = function handleMaster_() {
8326 // clear media request
8327 this.mediaRequest_ = null;
8328 var newMaster = parseMasterXml({
8329 masterXml: this.masterPlaylistLoader_.masterXml_,
8330 srcUrl: this.masterPlaylistLoader_.srcUrl,
8331 clientOffset: this.masterPlaylistLoader_.clientOffset_,
8332 sidxMapping: this.masterPlaylistLoader_.sidxMapping_
8333 });
8334 var oldMaster = this.masterPlaylistLoader_.master; // if we have an old master to compare the new master against
8335
8336 if (oldMaster) {
8337 newMaster = updateMaster(oldMaster, newMaster, this.masterPlaylistLoader_.sidxMapping_);
8338 } // only update master if we have a new master
8339
8340
8341 this.masterPlaylistLoader_.master = newMaster ? newMaster : oldMaster;
8342 var location = this.masterPlaylistLoader_.master.locations && this.masterPlaylistLoader_.master.locations[0];
8343
8344 if (location && location !== this.masterPlaylistLoader_.srcUrl) {
8345 this.masterPlaylistLoader_.srcUrl = location;
8346 }
8347
8348 if (!oldMaster || newMaster && newMaster.minimumUpdatePeriod !== oldMaster.minimumUpdatePeriod) {
8349 this.updateMinimumUpdatePeriodTimeout_();
8350 }
8351
8352 return Boolean(newMaster);
8353 };
8354
8355 _proto.updateMinimumUpdatePeriodTimeout_ = function updateMinimumUpdatePeriodTimeout_() {
8356 var mpl = this.masterPlaylistLoader_; // cancel any pending creation of mup on media
8357 // a new one will be added if needed.
8358
8359 if (mpl.createMupOnMedia_) {
8360 mpl.off('loadedmetadata', mpl.createMupOnMedia_);
8361 mpl.createMupOnMedia_ = null;
8362 } // clear any pending timeouts
8363
8364
8365 if (mpl.minimumUpdatePeriodTimeout_) {
8366 window__default['default'].clearTimeout(mpl.minimumUpdatePeriodTimeout_);
8367 mpl.minimumUpdatePeriodTimeout_ = null;
8368 }
8369
8370 var mup = mpl.master && mpl.master.minimumUpdatePeriod; // If the minimumUpdatePeriod has a value of 0, that indicates that the current
8371 // MPD has no future validity, so a new one will need to be acquired when new
8372 // media segments are to be made available. Thus, we use the target duration
8373 // in this case
8374
8375 if (mup === 0) {
8376 if (mpl.media()) {
8377 mup = mpl.media().targetDuration * 1000;
8378 } else {
8379 mpl.createMupOnMedia_ = mpl.updateMinimumUpdatePeriodTimeout_;
8380 mpl.one('loadedmetadata', mpl.createMupOnMedia_);
8381 }
8382 } // if minimumUpdatePeriod is invalid or <= zero (which can
8383 // happen when a live video becomes VOD), skip timeout
8384 // creation.
8385
8386
8387 if (typeof mup !== 'number' || mup <= 0) {
8388 if (mup < 0) {
8389 this.logger_("found invalid minimumUpdatePeriod of " + mup + ", not setting a timeout");
8390 }
8391
8392 return;
8393 }
8394
8395 this.createMUPTimeout_(mup);
8396 };
8397
8398 _proto.createMUPTimeout_ = function createMUPTimeout_(mup) {
8399 var mpl = this.masterPlaylistLoader_;
8400 mpl.minimumUpdatePeriodTimeout_ = window__default['default'].setTimeout(function () {
8401 mpl.minimumUpdatePeriodTimeout_ = null;
8402 mpl.trigger('minimumUpdatePeriod');
8403 mpl.createMUPTimeout_(mup);
8404 }, mup);
8405 }
8406 /**
8407 * Sends request to refresh the master xml and updates the parsed master manifest
8408 */
8409 ;
8410
8411 _proto.refreshXml_ = function refreshXml_() {
8412 var _this8 = this;
8413
8414 this.requestMaster_(function (req, masterChanged) {
8415 if (!masterChanged) {
8416 return;
8417 }
8418
8419 if (_this8.media_) {
8420 _this8.media_ = _this8.masterPlaylistLoader_.master.playlists[_this8.media_.id];
8421 } // This will filter out updated sidx info from the mapping
8422
8423
8424 _this8.masterPlaylistLoader_.sidxMapping_ = filterChangedSidxMappings(_this8.masterPlaylistLoader_.master, _this8.masterPlaylistLoader_.sidxMapping_);
8425
8426 _this8.addSidxSegments_(_this8.media(), _this8.state, function (sidxChanged) {
8427 // TODO: do we need to reload the current playlist?
8428 _this8.refreshMedia_(_this8.media().id);
8429 });
8430 });
8431 }
8432 /**
8433 * Refreshes the media playlist by re-parsing the master xml and updating playlist
8434 * references. If this is an alternate loader, the updated parsed manifest is retrieved
8435 * from the master loader.
8436 */
8437 ;
8438
8439 _proto.refreshMedia_ = function refreshMedia_(mediaID) {
8440 var _this9 = this;
8441
8442 if (!mediaID) {
8443 throw new Error('refreshMedia_ must take a media id');
8444 } // for master we have to reparse the master xml
8445 // to re-create segments based on current timing values
8446 // which may change media. We only skip updating master
8447 // if this is the first time this.media_ is being set,
8448 // as master was just parsed in that case.
8449
8450
8451 if (this.media_ && this.isMaster_) {
8452 this.handleMaster_();
8453 }
8454
8455 var playlists = this.masterPlaylistLoader_.master.playlists;
8456 var mediaChanged = !this.media_ || this.media_ !== playlists[mediaID];
8457
8458 if (mediaChanged) {
8459 this.media_ = playlists[mediaID];
8460 } else {
8461 this.trigger('playlistunchanged');
8462 }
8463
8464 if (!this.mediaUpdateTimeout) {
8465 var createMediaUpdateTimeout = function createMediaUpdateTimeout() {
8466 if (_this9.media().endList) {
8467 return;
8468 }
8469
8470 _this9.mediaUpdateTimeout = window__default['default'].setTimeout(function () {
8471 _this9.trigger('mediaupdatetimeout');
8472
8473 createMediaUpdateTimeout();
8474 }, refreshDelay(_this9.media(), Boolean(mediaChanged)));
8475 };
8476
8477 createMediaUpdateTimeout();
8478 }
8479
8480 this.trigger('loadedplaylist');
8481 };
8482
8483 return DashPlaylistLoader;
8484 }(EventTarget);
8485
8486 var Config = {
8487 GOAL_BUFFER_LENGTH: 30,
8488 MAX_GOAL_BUFFER_LENGTH: 60,
8489 BACK_BUFFER_LENGTH: 30,
8490 GOAL_BUFFER_LENGTH_RATE: 1,
8491 // 0.5 MB/s
8492 INITIAL_BANDWIDTH: 4194304,
8493 // A fudge factor to apply to advertised playlist bitrates to account for
8494 // temporary fluctuations in client bandwidth
8495 BANDWIDTH_VARIANCE: 1.2,
8496 // How much of the buffer must be filled before we consider upswitching
8497 BUFFER_LOW_WATER_LINE: 0,
8498 MAX_BUFFER_LOW_WATER_LINE: 30,
8499 // TODO: Remove this when experimentalBufferBasedABR is removed
8500 EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE: 16,
8501 BUFFER_LOW_WATER_LINE_RATE: 1,
8502 // If the buffer is greater than the high water line, we won't switch down
8503 BUFFER_HIGH_WATER_LINE: 30
8504 };
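
// Example (illustrative only): these defaults are documented as tunable at
// runtime through the exported Vhs object (assuming video.js with VHS is
// loaded), e.g. to buffer a minute of forward content:
//
//   videojs.Vhs.GOAL_BUFFER_LENGTH = 60;
//   videojs.Vhs.MAX_GOAL_BUFFER_LENGTH = 120;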
8505
8506 var stringToArrayBuffer = function stringToArrayBuffer(string) {
8507 var view = new Uint8Array(new ArrayBuffer(string.length));
8508
8509 for (var i = 0; i < string.length; i++) {
8510 view[i] = string.charCodeAt(i);
8511 }
8512
8513 return view.buffer;
8514 };
8515
8516 /* global Blob, BlobBuilder, Worker */
8517 // unify worker interface
8518 var browserWorkerPolyFill = function browserWorkerPolyFill(workerObj) {
8519 // node only supports on/off
8520 workerObj.on = workerObj.addEventListener;
8521 workerObj.off = workerObj.removeEventListener;
8522 return workerObj;
8523 };
8524
8525 var createObjectURL = function createObjectURL(str) {
8526 try {
8527 return URL.createObjectURL(new Blob([str], {
8528 type: 'application/javascript'
8529 }));
8530 } catch (e) {
8531 var blob = new BlobBuilder();
8532 blob.append(str);
8533 return URL.createObjectURL(blob.getBlob());
8534 }
8535 };
8536
8537 var factory = function factory(code) {
8538 return function () {
8539 var objectUrl = createObjectURL(code);
8540 var worker = browserWorkerPolyFill(new Worker(objectUrl));
8541 worker.objURL = objectUrl;
8542 var terminate = worker.terminate;
8543 worker.on = worker.addEventListener;
8544 worker.off = worker.removeEventListener;
8545
8546 worker.terminate = function () {
8547 URL.revokeObjectURL(objectUrl);
8548 return terminate.call(this);
8549 };
8550
8551 return worker;
8552 };
8553 };
8554 var transform = function transform(code) {
8555 return "var browserWorkerPolyFill = " + browserWorkerPolyFill.toString() + ";\n" + 'browserWorkerPolyFill(self);\n' + code;
8556 };
8557
8558 var getWorkerString = function getWorkerString(fn) {
8559 return fn.toString().replace(/^function.+?{/, '').slice(0, -1);
8560 };
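
// Example (illustrative only): the worker pipeline stringifies a function
// body, wraps it with the polyfill, and boots it from a blob URL:
//
//   var code = transform(getWorkerString(function () {
//     self.onmessage = function (e) { self.postMessage(e.data); };
//   }));
//   var createEchoWorker = factory(code);
//   var worker = createEchoWorker(); // worker.terminate() revokes the URL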
8561
8562 /* rollup-plugin-worker-factory start for worker!/Users/gkatsevman/p/http-streaming-release/src/transmuxer-worker.js */
8563 var workerCode$1 = transform(getWorkerString(function () {
8564 /**
8565 * mux.js
8566 *
8567 * Copyright (c) Brightcove
8568 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
8569 *
8570 * A lightweight readable stream implementation that handles event dispatching.
8571 * Objects that inherit from streams should call init in their constructors.
8572 */
8573
8574 var Stream = function Stream() {
8575 this.init = function () {
8576 var listeners = {};
8577 /**
8578 * Add a listener for a specified event type.
8579 * @param type {string} the event name
8580 * @param listener {function} the callback to be invoked when an event of
8581 * the specified type occurs
8582 */
8583
8584 this.on = function (type, listener) {
8585 if (!listeners[type]) {
8586 listeners[type] = [];
8587 }
8588
8589 listeners[type] = listeners[type].concat(listener);
8590 };
8591 /**
8592 * Remove a listener for a specified event type.
8593 * @param type {string} the event name
8594 * @param listener {function} a function previously registered for this
8595 * type of event through `on`
8596 */
8597
8598
8599 this.off = function (type, listener) {
8600 var index;
8601
8602 if (!listeners[type]) {
8603 return false;
8604 }
8605
8606 index = listeners[type].indexOf(listener);
8607 listeners[type] = listeners[type].slice();
8608 listeners[type].splice(index, 1);
8609 return index > -1;
8610 };
8611 /**
8612 * Trigger an event of the specified type on this stream. Any additional
8613 * arguments to this function are passed as parameters to event listeners.
8614 * @param type {string} the event name
8615 */
8616
8617
8618 this.trigger = function (type) {
8619 var callbacks, i, length, args;
8620 callbacks = listeners[type];
8621
8622 if (!callbacks) {
8623 return;
8624 } // Slicing the arguments on every invocation of this method
8625 // can add a significant amount of overhead. Avoid the
8626 // intermediate object creation for the common case of a
8627 // single callback argument
8628
8629
8630 if (arguments.length === 2) {
8631 length = callbacks.length;
8632
8633 for (i = 0; i < length; ++i) {
8634 callbacks[i].call(this, arguments[1]);
8635 }
8636 } else {
8637 args = [];
8638 i = arguments.length;
8639
8640 for (i = 1; i < arguments.length; ++i) {
8641 args.push(arguments[i]);
8642 }
8643
8644 length = callbacks.length;
8645
8646 for (i = 0; i < length; ++i) {
8647 callbacks[i].apply(this, args);
8648 }
8649 }
8650 };
8651 /**
8652 * Destroys the stream and cleans up.
8653 */
8654
8655
8656 this.dispose = function () {
8657 listeners = {};
8658 };
8659 };
8660 };
8661 /**
8662 * Forwards all `data` events on this stream to the destination stream. The
8663 * destination stream should provide a method `push` to receive the data
8664 * events as they arrive.
8665 * @param destination {stream} the stream that will receive all `data` events
8666 * @param autoFlush {boolean} if false, we will not call `flush` on the destination
8667 * when the current stream emits a 'done' event
8668 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
8669 */
8670
8671
8672 Stream.prototype.pipe = function (destination) {
8673 this.on('data', function (data) {
8674 destination.push(data);
8675 });
8676 this.on('done', function (flushSource) {
8677 destination.flush(flushSource);
8678 });
8679 this.on('partialdone', function (flushSource) {
8680 destination.partialFlush(flushSource);
8681 });
8682 this.on('endedtimeline', function (flushSource) {
8683 destination.endTimeline(flushSource);
8684 });
8685 this.on('reset', function (flushSource) {
8686 destination.reset(flushSource);
8687 });
8688 return destination;
8689 }; // Default stream functions that are expected to be overridden to perform
8690 // actual work. These are provided by the prototype as a sort of no-op
8691 // implementation so that we don't have to check for their existence in the
8692 // `pipe` function above.
8693
8694
8695 Stream.prototype.push = function (data) {
8696 this.trigger('data', data);
8697 };
8698
8699 Stream.prototype.flush = function (flushSource) {
8700 this.trigger('done', flushSource);
8701 };
8702
8703 Stream.prototype.partialFlush = function (flushSource) {
8704 this.trigger('partialdone', flushSource);
8705 };
8706
8707 Stream.prototype.endTimeline = function (flushSource) {
8708 this.trigger('endedtimeline', flushSource);
8709 };
8710
8711 Stream.prototype.reset = function (flushSource) {
8712 this.trigger('reset', flushSource);
8713 };
8714
8715 var stream = Stream;
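
// Example (illustrative only): a minimal pipeline built on Stream; a source
// pushes data events that are forwarded to the destination's push():
//
//   var source = new Stream(); source.init();
//   var sink = new Stream(); sink.init();
//   sink.push = function (data) { /* consume data */ };
//   source.pipe(sink);
//   source.push('hello'); // 'data' event, forwarded to sink.push('hello')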
8716 /**
8717 * mux.js
8718 *
8719 * Copyright (c) Brightcove
8720 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
8721 *
8722 * Functions that generate fragmented MP4s suitable for use with Media
8723 * Source Extensions.
8724 */
8725
8726 var UINT32_MAX = Math.pow(2, 32) - 1;
8727 var box, dinf, esds, ftyp, mdat, mfhd, minf, moof, moov, mvex, mvhd, trak, tkhd, mdia, mdhd, hdlr, sdtp, stbl, stsd, traf, trex, trun$1, types, MAJOR_BRAND, MINOR_VERSION, AVC1_BRAND, VIDEO_HDLR, AUDIO_HDLR, HDLR_TYPES, VMHD, SMHD, DREF, STCO, STSC, STSZ, STTS; // pre-calculate constants
8728
8729 (function () {
8730 var i;
8731 types = {
8732 avc1: [],
8733 // codingname
8734 avcC: [],
8735 btrt: [],
8736 dinf: [],
8737 dref: [],
8738 esds: [],
8739 ftyp: [],
8740 hdlr: [],
8741 mdat: [],
8742 mdhd: [],
8743 mdia: [],
8744 mfhd: [],
8745 minf: [],
8746 moof: [],
8747 moov: [],
8748 mp4a: [],
8749 // codingname
8750 mvex: [],
8751 mvhd: [],
8752 pasp: [],
8753 sdtp: [],
8754 smhd: [],
8755 stbl: [],
8756 stco: [],
8757 stsc: [],
8758 stsd: [],
8759 stsz: [],
8760 stts: [],
8761 styp: [],
8762 tfdt: [],
8763 tfhd: [],
8764 traf: [],
8765 trak: [],
8766 trun: [],
8767 trex: [],
8768 tkhd: [],
8769 vmhd: []
8770 }; // In environments where Uint8Array is undefined (e.g., IE8), skip setup so
8771 // that we don't throw an error
8772
8773 if (typeof Uint8Array === 'undefined') {
8774 return;
8775 }
8776
8777 for (i in types) {
8778 if (types.hasOwnProperty(i)) {
8779 types[i] = [i.charCodeAt(0), i.charCodeAt(1), i.charCodeAt(2), i.charCodeAt(3)];
8780 }
8781 }
8782
8783 MAJOR_BRAND = new Uint8Array(['i'.charCodeAt(0), 's'.charCodeAt(0), 'o'.charCodeAt(0), 'm'.charCodeAt(0)]);
8784 AVC1_BRAND = new Uint8Array(['a'.charCodeAt(0), 'v'.charCodeAt(0), 'c'.charCodeAt(0), '1'.charCodeAt(0)]);
8785 MINOR_VERSION = new Uint8Array([0, 0, 0, 1]);
8786 VIDEO_HDLR = new Uint8Array([0x00, // version 0
8787 0x00, 0x00, 0x00, // flags
8788 0x00, 0x00, 0x00, 0x00, // pre_defined
8789 0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'
8790 0x00, 0x00, 0x00, 0x00, // reserved
8791 0x00, 0x00, 0x00, 0x00, // reserved
8792 0x00, 0x00, 0x00, 0x00, // reserved
8793 0x56, 0x69, 0x64, 0x65, 0x6f, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
8794 ]);
8795 AUDIO_HDLR = new Uint8Array([0x00, // version 0
8796 0x00, 0x00, 0x00, // flags
8797 0x00, 0x00, 0x00, 0x00, // pre_defined
8798 0x73, 0x6f, 0x75, 0x6e, // handler_type: 'soun'
8799 0x00, 0x00, 0x00, 0x00, // reserved
8800 0x00, 0x00, 0x00, 0x00, // reserved
8801 0x00, 0x00, 0x00, 0x00, // reserved
8802 0x53, 0x6f, 0x75, 0x6e, 0x64, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler'
8803 ]);
8804 HDLR_TYPES = {
8805 video: VIDEO_HDLR,
8806 audio: AUDIO_HDLR
8807 };
8808 DREF = new Uint8Array([0x00, // version 0
8809 0x00, 0x00, 0x00, // flags
8810 0x00, 0x00, 0x00, 0x01, // entry_count
8811 0x00, 0x00, 0x00, 0x0c, // entry_size
8812 0x75, 0x72, 0x6c, 0x20, // 'url' type
8813 0x00, // version 0
8814 0x00, 0x00, 0x01 // entry_flags
8815 ]);
8816 SMHD = new Uint8Array([0x00, // version
8817 0x00, 0x00, 0x00, // flags
8818 0x00, 0x00, // balance, 0 means centered
8819 0x00, 0x00 // reserved
8820 ]);
8821 STCO = new Uint8Array([0x00, // version
8822 0x00, 0x00, 0x00, // flags
8823 0x00, 0x00, 0x00, 0x00 // entry_count
8824 ]);
8825 STSC = STCO;
8826 STSZ = new Uint8Array([0x00, // version
8827 0x00, 0x00, 0x00, // flags
8828 0x00, 0x00, 0x00, 0x00, // sample_size
8829 0x00, 0x00, 0x00, 0x00 // sample_count
8830 ]);
8831 STTS = STCO;
8832 VMHD = new Uint8Array([0x00, // version
8833 0x00, 0x00, 0x01, // flags
8834 0x00, 0x00, // graphicsmode
8835 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 // opcolor
8836 ]);
8837 })();
8838
8839 box = function box(type) {
8840 var payload = [],
8841 size = 0,
8842 i,
8843 result,
8844 view;
8845
8846 for (i = 1; i < arguments.length; i++) {
8847 payload.push(arguments[i]);
8848 }
8849
8850 i = payload.length; // calculate the total size we need to allocate
8851
8852 while (i--) {
8853 size += payload[i].byteLength;
8854 }
8855
8856 result = new Uint8Array(size + 8);
8857 view = new DataView(result.buffer, result.byteOffset, result.byteLength);
8858 view.setUint32(0, result.byteLength);
8859 result.set(type, 4); // copy the payload into the result
8860
8861 for (i = 0, size = 8; i < payload.length; i++) {
8862 result.set(payload[i], size);
8863 size += payload[i].byteLength;
8864 }
8865
8866 return result;
8867 };
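 // A box serializes as a 32-bit big-endian size (8-byte header plus
 // payload), the 4-byte type, then the payload. A minimal sketch of the
 // layout (payload bytes assumed for illustration):
 //
 //   box(types.ftyp, new Uint8Array([1, 2]))
 //   // => [0x00, 0x00, 0x00, 0x0a,  // size: 8 header bytes + 2 payload bytes
 //   //     0x66, 0x74, 0x79, 0x70,  // type: 'ftyp'
 //   //     0x01, 0x02]              // payload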
8868
8869 dinf = function dinf() {
8870 return box(types.dinf, box(types.dref, DREF));
8871 };
8872
8873 esds = function esds(track) {
8874 return box(types.esds, new Uint8Array([0x00, // version
8875 0x00, 0x00, 0x00, // flags
8876 // ES_Descriptor
8877 0x03, // tag, ES_DescrTag
8878 0x19, // length
8879 0x00, 0x00, // ES_ID
8880 0x00, // streamDependenceFlag, URL_flag, reserved, streamPriority
8881 // DecoderConfigDescriptor
8882 0x04, // tag, DecoderConfigDescrTag
8883 0x11, // length
8884 0x40, // object type
8885 0x15, // streamType
8886 0x00, 0x06, 0x00, // bufferSizeDB
8887 0x00, 0x00, 0xda, 0xc0, // maxBitrate
8888 0x00, 0x00, 0xda, 0xc0, // avgBitrate
8889 // DecoderSpecificInfo
8890 0x05, // tag, DecoderSpecificInfoTag
8891 0x02, // length
8892 // ISO/IEC 14496-3, AudioSpecificConfig
8893 // for samplingFrequencyIndex see ISO/IEC 13818-7:2006, 8.1.3.2.2, Table 35
8894 track.audioobjecttype << 3 | track.samplingfrequencyindex >>> 1, track.samplingfrequencyindex << 7 | track.channelcount << 3, 0x06, 0x01, 0x02 // GASpecificConfig
8895 ]));
8896 };
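 // A worked example of the two AudioSpecificConfig bytes packed above
 // (values assumed for illustration): for AAC-LC stereo at 44.1kHz,
 // audioobjecttype = 2, samplingfrequencyindex = 4 and channelcount = 2, so
 //   2 << 3 | 4 >>> 1         // => 0x12 (5-bit object type, top 3 index bits)
 //   (4 << 7 | 2 << 3) & 0xFF // => 0x10 (low index bit, 4-bit channel config)
 // yielding the familiar 0x12, 0x10 pair; the Uint8Array store performs
 // the & 0xFF truncation implicitly.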
8897
8898 ftyp = function ftyp() {
8899 return box(types.ftyp, MAJOR_BRAND, MINOR_VERSION, MAJOR_BRAND, AVC1_BRAND);
8900 };
8901
8902 hdlr = function hdlr(type) {
8903 return box(types.hdlr, HDLR_TYPES[type]);
8904 };
8905
8906 mdat = function mdat(data) {
8907 return box(types.mdat, data);
8908 };
8909
8910 mdhd = function mdhd(track) {
8911 var result = new Uint8Array([0x00, // version 0
8912 0x00, 0x00, 0x00, // flags
8913 0x00, 0x00, 0x00, 0x02, // creation_time
8914 0x00, 0x00, 0x00, 0x03, // modification_time
8915 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
8916 track.duration >>> 24 & 0xFF, track.duration >>> 16 & 0xFF, track.duration >>> 8 & 0xFF, track.duration & 0xFF, // duration
8917 0x55, 0xc4, // 'und' language (undetermined)
8918 0x00, 0x00]); // Use the sample rate from the track metadata, when it is
8919 // defined. The sample rate can be parsed out of an ADTS header, for
8920 // instance.
8921
8922 if (track.samplerate) {
8923 result[12] = track.samplerate >>> 24 & 0xFF;
8924 result[13] = track.samplerate >>> 16 & 0xFF;
8925 result[14] = track.samplerate >>> 8 & 0xFF;
8926 result[15] = track.samplerate & 0xFF;
8927 }
8928
8929 return box(types.mdhd, result);
8930 };
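 // For example (numbers illustrative): a six-second track at the default
 // 90kHz timescale carries duration = 6 * 90000 = 540000, i.e. bytes
 // [0x00, 0x08, 0x3d, 0x60] at payload offsets 16-19, while an audio
 // track overwrites the timescale bytes at offsets 12-15 with its sample rate.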
8931
8932 mdia = function mdia(track) {
8933 return box(types.mdia, mdhd(track), hdlr(track.type), minf(track));
8934 };
8935
8936 mfhd = function mfhd(sequenceNumber) {
8937 return box(types.mfhd, new Uint8Array([0x00, 0x00, 0x00, 0x00, // flags
8938 (sequenceNumber & 0xFF000000) >> 24, (sequenceNumber & 0xFF0000) >> 16, (sequenceNumber & 0xFF00) >> 8, sequenceNumber & 0xFF // sequence_number
8939 ]));
8940 };
8941
8942 minf = function minf(track) {
8943 return box(types.minf, track.type === 'video' ? box(types.vmhd, VMHD) : box(types.smhd, SMHD), dinf(), stbl(track));
8944 };
8945
8946 moof = function moof(sequenceNumber, tracks) {
8947 var trackFragments = [],
8948 i = tracks.length; // build traf boxes for each track fragment
8949
8950 while (i--) {
8951 trackFragments[i] = traf(tracks[i]);
8952 }
8953
8954 return box.apply(null, [types.moof, mfhd(sequenceNumber)].concat(trackFragments));
8955 };
8956 /**
8957 * Returns a movie box.
8958 * @param tracks {array} the tracks associated with this movie
8959 * @see ISO/IEC 14496-12:2012(E), section 8.2.1
8960 */
8961
8962
8963 moov = function moov(tracks) {
8964 var i = tracks.length,
8965 boxes = [];
8966
8967 while (i--) {
8968 boxes[i] = trak(tracks[i]);
8969 }
8970
8971 return box.apply(null, [types.moov, mvhd(0xffffffff)].concat(boxes).concat(mvex(tracks)));
8972 };
8973
8974 mvex = function mvex(tracks) {
8975 var i = tracks.length,
8976 boxes = [];
8977
8978 while (i--) {
8979 boxes[i] = trex(tracks[i]);
8980 }
8981
8982 return box.apply(null, [types.mvex].concat(boxes));
8983 };
8984
8985 mvhd = function mvhd(duration) {
8986 var bytes = new Uint8Array([0x00, // version 0
8987 0x00, 0x00, 0x00, // flags
8988 0x00, 0x00, 0x00, 0x01, // creation_time
8989 0x00, 0x00, 0x00, 0x02, // modification_time
8990 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
8991 (duration & 0xFF000000) >> 24, (duration & 0xFF0000) >> 16, (duration & 0xFF00) >> 8, duration & 0xFF, // duration
8992 0x00, 0x01, 0x00, 0x00, // 1.0 rate
8993 0x01, 0x00, // 1.0 volume
8994 0x00, 0x00, // reserved
8995 0x00, 0x00, 0x00, 0x00, // reserved
8996 0x00, 0x00, 0x00, 0x00, // reserved
8997 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
8998 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
8999 0xff, 0xff, 0xff, 0xff // next_track_ID
9000 ]);
9001 return box(types.mvhd, bytes);
9002 };
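 // Note on the fixed-point fields above: rate 0x00010000 is 1.0 in 16.16
 // notation, volume 0x0100 is 1.0 in 8.8, and the unity matrix stores 1.0
 // as 0x00010000 (16.16) everywhere except its last column, where
 // 0x40000000 is 1.0 in 2.30 notation (see ISO/IEC 14496-12, 8.2.2).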
9003
9004 sdtp = function sdtp(track) {
9005 var samples = track.samples || [],
9006 bytes = new Uint8Array(4 + samples.length),
9007 flags,
9008 i; // leave the full box header (4 bytes) all zero
9009 // write the sample table
9010
9011 for (i = 0; i < samples.length; i++) {
9012 flags = samples[i].flags;
9013 bytes[i + 4] = flags.dependsOn << 4 | flags.isDependedOn << 2 | flags.hasRedundancy;
9014 }
9015
9016 return box(types.sdtp, bytes);
9017 };
9018
9019 stbl = function stbl(track) {
9020 return box(types.stbl, stsd(track), box(types.stts, STTS), box(types.stsc, STSC), box(types.stsz, STSZ), box(types.stco, STCO));
9021 };
9022
9023 (function () {
9024 var videoSample, audioSample;
9025
9026 stsd = function stsd(track) {
9027 return box(types.stsd, new Uint8Array([0x00, // version 0
9028 0x00, 0x00, 0x00, // flags
9029 0x00, 0x00, 0x00, 0x01]), track.type === 'video' ? videoSample(track) : audioSample(track));
9030 };
9031
9032 videoSample = function videoSample(track) {
9033 var sps = track.sps || [],
9034 pps = track.pps || [],
9035 sequenceParameterSets = [],
9036 pictureParameterSets = [],
9037 i,
9038 avc1Box; // assemble the SPSs
9039
9040 for (i = 0; i < sps.length; i++) {
9041 sequenceParameterSets.push((sps[i].byteLength & 0xFF00) >>> 8);
9042 sequenceParameterSets.push(sps[i].byteLength & 0xFF); // sequenceParameterSetLength
9043
9044 sequenceParameterSets = sequenceParameterSets.concat(Array.prototype.slice.call(sps[i])); // SPS
9045 } // assemble the PPSs
9046
9047
9048 for (i = 0; i < pps.length; i++) {
9049 pictureParameterSets.push((pps[i].byteLength & 0xFF00) >>> 8);
9050 pictureParameterSets.push(pps[i].byteLength & 0xFF);
9051 pictureParameterSets = pictureParameterSets.concat(Array.prototype.slice.call(pps[i]));
9052 }
9053
9054 avc1Box = [types.avc1, new Uint8Array([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
9055 0x00, 0x01, // data_reference_index
9056 0x00, 0x00, // pre_defined
9057 0x00, 0x00, // reserved
9058 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
9059 (track.width & 0xff00) >> 8, track.width & 0xff, // width
9060 (track.height & 0xff00) >> 8, track.height & 0xff, // height
9061 0x00, 0x48, 0x00, 0x00, // horizresolution
9062 0x00, 0x48, 0x00, 0x00, // vertresolution
9063 0x00, 0x00, 0x00, 0x00, // reserved
9064 0x00, 0x01, // frame_count
9065 0x13, 0x76, 0x69, 0x64, 0x65, 0x6f, 0x6a, 0x73, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x69, 0x62, 0x2d, 0x68, 0x6c, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // compressorname
9066 0x00, 0x18, // depth = 24
9067 0x11, 0x11 // pre_defined = -1
9068 ]), box(types.avcC, new Uint8Array([0x01, // configurationVersion
9069 track.profileIdc, // AVCProfileIndication
9070 track.profileCompatibility, // profile_compatibility
9071 track.levelIdc, // AVCLevelIndication
9072 0xff // lengthSizeMinusOne, hard-coded to 4 bytes
9073 ].concat([sps.length], // numOfSequenceParameterSets
9074 sequenceParameterSets, // "SPS"
9075 [pps.length], // numOfPictureParameterSets
9076 pictureParameterSets // "PPS"
9077 ))), box(types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80, // bufferSizeDB
9078 0x00, 0x2d, 0xc6, 0xc0, // maxBitrate
9079 0x00, 0x2d, 0xc6, 0xc0 // avgBitrate
9080 ]))];
9081
9082 if (track.sarRatio) {
9083 var hSpacing = track.sarRatio[0],
9084 vSpacing = track.sarRatio[1];
9085 avc1Box.push(box(types.pasp, new Uint8Array([(hSpacing & 0xFF000000) >> 24, (hSpacing & 0xFF0000) >> 16, (hSpacing & 0xFF00) >> 8, hSpacing & 0xFF, (vSpacing & 0xFF000000) >> 24, (vSpacing & 0xFF0000) >> 16, (vSpacing & 0xFF00) >> 8, vSpacing & 0xFF])));
9086 }
9087
9088 return box.apply(null, avc1Box);
9089 };
9090
9091 audioSample = function audioSample(track) {
9092 return box(types.mp4a, new Uint8Array([// SampleEntry, ISO/IEC 14496-12
9093 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
9094 0x00, 0x01, // data_reference_index
9095 // AudioSampleEntry, ISO/IEC 14496-12
9096 0x00, 0x00, 0x00, 0x00, // reserved
9097 0x00, 0x00, 0x00, 0x00, // reserved
9098 (track.channelcount & 0xff00) >> 8, track.channelcount & 0xff, // channelcount
9099 (track.samplesize & 0xff00) >> 8, track.samplesize & 0xff, // samplesize
9100 0x00, 0x00, // pre_defined
9101 0x00, 0x00, // reserved
9102 (track.samplerate & 0xff00) >> 8, track.samplerate & 0xff, 0x00, 0x00 // samplerate, 16.16
9103 // MP4AudioSampleEntry, ISO/IEC 14496-14
9104 ]), esds(track));
9105 };
9106 })();
9107
9108 tkhd = function tkhd(track) {
9109 var result = new Uint8Array([0x00, // version 0
9110 0x00, 0x00, 0x07, // flags
9111 0x00, 0x00, 0x00, 0x00, // creation_time
9112 0x00, 0x00, 0x00, 0x00, // modification_time
9113 (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
9114 0x00, 0x00, 0x00, 0x00, // reserved
9115 (track.duration & 0xFF000000) >> 24, (track.duration & 0xFF0000) >> 16, (track.duration & 0xFF00) >> 8, track.duration & 0xFF, // duration
9116 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
9117 0x00, 0x00, // layer
9118 0x00, 0x00, // alternate_group
9119 0x01, 0x00, // non-audio track volume
9120 0x00, 0x00, // reserved
9121 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
9122 (track.width & 0xFF00) >> 8, track.width & 0xFF, 0x00, 0x00, // width
9123 (track.height & 0xFF00) >> 8, track.height & 0xFF, 0x00, 0x00 // height
9124 ]);
9125 return box(types.tkhd, result);
9126 };
9127 /**
9128 * Generate a track fragment (traf) box. A traf box collects metadata
9129 * about tracks in a movie fragment (moof) box.
9130 */
9131
9132
9133 traf = function traf(track) {
9134 var trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable, dataOffset, upperWordBaseMediaDecodeTime, lowerWordBaseMediaDecodeTime;
9135 trackFragmentHeader = box(types.tfhd, new Uint8Array([0x00, // version 0
9136 0x00, 0x00, 0x3a, // flags
9137 (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
9138 0x00, 0x00, 0x00, 0x01, // sample_description_index
9139 0x00, 0x00, 0x00, 0x00, // default_sample_duration
9140 0x00, 0x00, 0x00, 0x00, // default_sample_size
9141 0x00, 0x00, 0x00, 0x00 // default_sample_flags
9142 ]));
9143 upperWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime / (UINT32_MAX + 1));
9144 lowerWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime % (UINT32_MAX + 1));
9145 trackFragmentDecodeTime = box(types.tfdt, new Uint8Array([0x01, // version 1
9146 0x00, 0x00, 0x00, // flags
9147 // baseMediaDecodeTime
9148 upperWordBaseMediaDecodeTime >>> 24 & 0xFF, upperWordBaseMediaDecodeTime >>> 16 & 0xFF, upperWordBaseMediaDecodeTime >>> 8 & 0xFF, upperWordBaseMediaDecodeTime & 0xFF, lowerWordBaseMediaDecodeTime >>> 24 & 0xFF, lowerWordBaseMediaDecodeTime >>> 16 & 0xFF, lowerWordBaseMediaDecodeTime >>> 8 & 0xFF, lowerWordBaseMediaDecodeTime & 0xFF])); // the data offset specifies the number of bytes from the start of
9149 // the containing moof to the first payload byte of the associated
9150 // mdat
9151
9152 dataOffset = 32 + // tfhd
9153 20 + // tfdt
9154 8 + // traf header
9155 16 + // mfhd
9156 8 + // moof header
9157 8; // mdat header
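 // For reference, that audio-track total is 32 + 20 + 8 + 16 + 8 + 8 = 92
 // bytes: every full box costs an 8-byte header plus its payload (tfhd is
 // 8 + 24, version-1 tfdt is 8 + 12, mfhd is 8 + 8), while the traf and
 // moof containers and the mdat contribute only their 8-byte headers.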
9158 // audio tracks require less metadata
9159
9160 if (track.type === 'audio') {
9161 trackFragmentRun = trun$1(track, dataOffset);
9162 return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun);
9163 } // video tracks should contain an independent and disposable samples
9164 // box (sdtp)
9165 // generate one and adjust offsets to match
9166
9167
9168 sampleDependencyTable = sdtp(track);
9169 trackFragmentRun = trun$1(track, sampleDependencyTable.length + dataOffset);
9170 return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable);
9171 };
9172 /**
9173 * Generate a track box.
9174 * @param track {object} a track definition
9175 * @return {Uint8Array} the track box
9176 */
9177
9178
9179 trak = function trak(track) {
9180 track.duration = track.duration || 0xffffffff;
9181 return box(types.trak, tkhd(track), mdia(track));
9182 };
9183
9184 trex = function trex(track) {
9185 var result = new Uint8Array([0x00, // version 0
9186 0x00, 0x00, 0x00, // flags
9187 (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
9188 0x00, 0x00, 0x00, 0x01, // default_sample_description_index
9189 0x00, 0x00, 0x00, 0x00, // default_sample_duration
9190 0x00, 0x00, 0x00, 0x00, // default_sample_size
9191 0x00, 0x01, 0x00, 0x01 // default_sample_flags
9192 ]); // the last two bytes of default_sample_flags is the sample
9193 // degradation priority, a hint about the importance of this sample
9194 // relative to others. Lower the degradation priority for all sample
9195 // types other than video.
9196
9197 if (track.type !== 'video') {
9198 result[result.length - 1] = 0x00;
9199 }
9200
9201 return box(types.trex, result);
9202 };
9203
9204 (function () {
9205 var audioTrun, videoTrun, trunHeader; // This method assumes all samples are uniform. That is, if a
9206 // duration is present for the first sample, it will be present for
9207 // all subsequent samples.
9208 // see ISO/IEC 14496-12:2012, Section 8.8.8.1
9209
9210 trunHeader = function trunHeader(samples, offset) {
9211 var durationPresent = 0,
9212 sizePresent = 0,
9213 flagsPresent = 0,
9214 compositionTimeOffset = 0; // trun flag constants
9215
9216 if (samples.length) {
9217 if (samples[0].duration !== undefined) {
9218 durationPresent = 0x1;
9219 }
9220
9221 if (samples[0].size !== undefined) {
9222 sizePresent = 0x2;
9223 }
9224
9225 if (samples[0].flags !== undefined) {
9226 flagsPresent = 0x4;
9227 }
9228
9229 if (samples[0].compositionTimeOffset !== undefined) {
9230 compositionTimeOffset = 0x8;
9231 }
9232 }
9233
9234 return [0x00, // version 0
9235 0x00, durationPresent | sizePresent | flagsPresent | compositionTimeOffset, 0x01, // flags
9236 (samples.length & 0xFF000000) >>> 24, (samples.length & 0xFF0000) >>> 16, (samples.length & 0xFF00) >>> 8, samples.length & 0xFF, // sample_count
9237 (offset & 0xFF000000) >>> 24, (offset & 0xFF0000) >>> 16, (offset & 0xFF00) >>> 8, offset & 0xFF // data_offset
9238 ];
9239 };
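 // For instance, a run whose samples all carry a duration, size, flags and
 // composition time offset produces the 24-bit trun flags 0x000f01:
 // 0x000001 (data-offset-present) | 0x000100 (sample-duration-present) |
 // 0x000200 (sample-size-present) | 0x000400 (sample-flags-present) |
 // 0x000800 (sample-composition-time-offsets-present).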
9240
9241 videoTrun = function videoTrun(track, offset) {
9242 var bytesOffest, bytes, header, samples, sample, i;
9243 samples = track.samples || [];
9244 offset += 8 + 12 + 16 * samples.length;
9245 header = trunHeader(samples, offset);
9246 bytes = new Uint8Array(header.length + samples.length * 16);
9247 bytes.set(header);
9248 bytesOffest = header.length;
9249
9250 for (i = 0; i < samples.length; i++) {
9251 sample = samples[i];
9252 bytes[bytesOffest++] = (sample.duration & 0xFF000000) >>> 24;
9253 bytes[bytesOffest++] = (sample.duration & 0xFF0000) >>> 16;
9254 bytes[bytesOffest++] = (sample.duration & 0xFF00) >>> 8;
9255 bytes[bytesOffest++] = sample.duration & 0xFF; // sample_duration
9256
9257 bytes[bytesOffest++] = (sample.size & 0xFF000000) >>> 24;
9258 bytes[bytesOffest++] = (sample.size & 0xFF0000) >>> 16;
9259 bytes[bytesOffest++] = (sample.size & 0xFF00) >>> 8;
9260 bytes[bytesOffest++] = sample.size & 0xFF; // sample_size
9261
9262 bytes[bytesOffest++] = sample.flags.isLeading << 2 | sample.flags.dependsOn;
9263 bytes[bytesOffest++] = sample.flags.isDependedOn << 6 | sample.flags.hasRedundancy << 4 | sample.flags.paddingValue << 1 | sample.flags.isNonSyncSample;
9265 bytes[bytesOffest++] = (sample.flags.degradationPriority & 0xFF00) >>> 8; // degradation_priority is a 16-bit field; high byte first
9266 bytes[bytesOffest++] = sample.flags.degradationPriority & 0xFF; // sample_flags
9266
9267 bytes[bytesOffest++] = (sample.compositionTimeOffset & 0xFF000000) >>> 24;
9268 bytes[bytesOffest++] = (sample.compositionTimeOffset & 0xFF0000) >>> 16;
9269 bytes[bytesOffest++] = (sample.compositionTimeOffset & 0xFF00) >>> 8;
9270 bytes[bytesOffest++] = sample.compositionTimeOffset & 0xFF; // sample_composition_time_offset
9271 }
9272
9273 return box(types.trun, bytes);
9274 };
9275
9276 audioTrun = function audioTrun(track, offset) {
9277 var bytes, bytesOffest, header, samples, sample, i;
9278 samples = track.samples || [];
9279 offset += 8 + 12 + 8 * samples.length;
9280 header = trunHeader(samples, offset);
9281 bytes = new Uint8Array(header.length + samples.length * 8);
9282 bytes.set(header);
9283 bytesOffest = header.length;
9284
9285 for (i = 0; i < samples.length; i++) {
9286 sample = samples[i];
9287 bytes[bytesOffest++] = (sample.duration & 0xFF000000) >>> 24;
9288 bytes[bytesOffest++] = (sample.duration & 0xFF0000) >>> 16;
9289 bytes[bytesOffest++] = (sample.duration & 0xFF00) >>> 8;
9290 bytes[bytesOffest++] = sample.duration & 0xFF; // sample_duration
9291
9292 bytes[bytesOffest++] = (sample.size & 0xFF000000) >>> 24;
9293 bytes[bytesOffest++] = (sample.size & 0xFF0000) >>> 16;
9294 bytes[bytesOffest++] = (sample.size & 0xFF00) >>> 8;
9295 bytes[bytesOffest++] = sample.size & 0xFF; // sample_size
9296 }
9297
9298 return box(types.trun, bytes);
9299 };
9300
9301 trun$1 = function trun(track, offset) {
9302 if (track.type === 'audio') {
9303 return audioTrun(track, offset);
9304 }
9305
9306 return videoTrun(track, offset);
9307 };
9308 })();
9309
9310 var mp4Generator = {
9311 ftyp: ftyp,
9312 mdat: mdat,
9313 moof: moof,
9314 moov: moov,
9315 initSegment: function initSegment(tracks) {
9316 var fileType = ftyp(),
9317 movie = moov(tracks),
9318 result;
9319 result = new Uint8Array(fileType.byteLength + movie.byteLength);
9320 result.set(fileType);
9321 result.set(movie, fileType.byteLength);
9322 return result;
9323 }
9324 };
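 // A minimal usage sketch (the track fields shown are assumptions for
 // illustration; real tracks are produced by the transmuxer later in
 // this bundle):
 //
 //   var init = mp4Generator.initSegment([{
 //     id: 1, type: 'audio', duration: 0,
 //     samplerate: 44100, samplesize: 16, channelcount: 2,
 //     audioobjecttype: 2, samplingfrequencyindex: 4
 //   }]);
 //   // init is a single Uint8Array: ftyp immediately followed by moov.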
9325 /**
9326 * mux.js
9327 *
9328 * Copyright (c) Brightcove
9329 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
9330 */
9331 // Convert an array of nal units into an array of frames with each frame being
9332 // composed of the nal units that make up that frame
9333 // Also keep track of cumulative data about the frame from the nal units such
9334 // as the frame duration, starting pts, etc.
9335
9336 var groupNalsIntoFrames = function groupNalsIntoFrames(nalUnits) {
9337 var i,
9338 currentNal,
9339 currentFrame = [],
9340 frames = []; // TODO added for LHLS, make sure this is OK
9341
9342 frames.byteLength = 0;
9343 frames.nalCount = 0;
9344 frames.duration = 0;
9345 currentFrame.byteLength = 0;
9346
9347 for (i = 0; i < nalUnits.length; i++) {
9348 currentNal = nalUnits[i]; // Split on 'aud'-type nal units
9349
9350 if (currentNal.nalUnitType === 'access_unit_delimiter_rbsp') {
9351 // Since the very first nal unit is expected to be an AUD
9352 // only push to the frames array when currentFrame is not empty
9353 if (currentFrame.length) {
9354 currentFrame.duration = currentNal.dts - currentFrame.dts; // TODO added for LHLS, make sure this is OK
9355
9356 frames.byteLength += currentFrame.byteLength;
9357 frames.nalCount += currentFrame.length;
9358 frames.duration += currentFrame.duration;
9359 frames.push(currentFrame);
9360 }
9361
9362 currentFrame = [currentNal];
9363 currentFrame.byteLength = currentNal.data.byteLength;
9364 currentFrame.pts = currentNal.pts;
9365 currentFrame.dts = currentNal.dts;
9366 } else {
9367 // Specifically flag key frames for ease of use later
9368 if (currentNal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
9369 currentFrame.keyFrame = true;
9370 }
9371
9372 currentFrame.duration = currentNal.dts - currentFrame.dts;
9373 currentFrame.byteLength += currentNal.data.byteLength;
9374 currentFrame.push(currentNal);
9375 }
9376 } // For the last frame, use the duration of the previous frame if we
9377 // have nothing better to go on
9378
9379
9380 if (frames.length && (!currentFrame.duration || currentFrame.duration <= 0)) {
9381 currentFrame.duration = frames[frames.length - 1].duration;
9382 } // Push the final frame
9383 // TODO added for LHLS, make sure this is OK
9384
9385
9386 frames.byteLength += currentFrame.byteLength;
9387 frames.nalCount += currentFrame.length;
9388 frames.duration += currentFrame.duration;
9389 frames.push(currentFrame);
9390 return frames;
9391 }; // Convert an array of frames into an array of Gop with each Gop being composed
9392 // of the frames that make up that Gop
9393 // Also keep track of cumulative data about the Gop from the frames such as the
9394 // Gop duration, starting pts, etc.
9395
9396
9397 var groupFramesIntoGops = function groupFramesIntoGops(frames) {
9398 var i,
9399 currentFrame,
9400 currentGop = [],
9401 gops = []; // We must pre-set some of the values on the Gop since we
9402 // keep running totals of these values
9403
9404 currentGop.byteLength = 0;
9405 currentGop.nalCount = 0;
9406 currentGop.duration = 0;
9407 currentGop.pts = frames[0].pts;
9408 currentGop.dts = frames[0].dts; // store some metadata about all the Gops
9409
9410 gops.byteLength = 0;
9411 gops.nalCount = 0;
9412 gops.duration = 0;
9413 gops.pts = frames[0].pts;
9414 gops.dts = frames[0].dts;
9415
9416 for (i = 0; i < frames.length; i++) {
9417 currentFrame = frames[i];
9418
9419 if (currentFrame.keyFrame) {
9420 // Since the very first frame is expected to be a keyframe
9421 // only push to the gops array when currentGop is not empty
9422 if (currentGop.length) {
9423 gops.push(currentGop);
9424 gops.byteLength += currentGop.byteLength;
9425 gops.nalCount += currentGop.nalCount;
9426 gops.duration += currentGop.duration;
9427 }
9428
9429 currentGop = [currentFrame];
9430 currentGop.nalCount = currentFrame.length;
9431 currentGop.byteLength = currentFrame.byteLength;
9432 currentGop.pts = currentFrame.pts;
9433 currentGop.dts = currentFrame.dts;
9434 currentGop.duration = currentFrame.duration;
9435 } else {
9436 currentGop.duration += currentFrame.duration;
9437 currentGop.nalCount += currentFrame.length;
9438 currentGop.byteLength += currentFrame.byteLength;
9439 currentGop.push(currentFrame);
9440 }
9441 }
9442
9443 if (gops.length && currentGop.duration <= 0) {
9444 currentGop.duration = gops[gops.length - 1].duration;
9445 }
9446
9447 gops.byteLength += currentGop.byteLength;
9448 gops.nalCount += currentGop.nalCount;
9449 gops.duration += currentGop.duration; // push the final Gop
9450
9451 gops.push(currentGop);
9452 return gops;
9453 };
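 // Both grouping passes return ordinary arrays decorated with running
 // totals. A sketch of the resulting shape (field values illustrative):
 //
 //   gops = [gop0, gop1];               // each gop is an array of frames
 //   gops.byteLength; gops.nalCount;    // totals across every gop
 //   gops.duration; gops.pts; gops.dts; // timing for the whole group
 //   gops[0].duration; gops[0].pts;     // per-gop metadata, same pattern
 //   gops[0][0].keyFrame;               // true; a gop starts at a keyframe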
9454 /*
9455 * Search for the first keyframe in the GOPs and throw away all frames
9456 * until that keyframe. Then extend the duration of the pulled keyframe
9457 * and pull the PTS and DTS of the keyframe so that it covers the time
9458 * range of the frames that were disposed.
9459 *
9460 * @param {Array} gops video GOPs
9461 * @returns {Array} modified video GOPs
9462 */
9463
9464
9465 var extendFirstKeyFrame = function extendFirstKeyFrame(gops) {
9466 var currentGop;
9467
9468 if (!gops[0][0].keyFrame && gops.length > 1) {
9469 // Remove the first GOP
9470 currentGop = gops.shift();
9471 gops.byteLength -= currentGop.byteLength;
9472 gops.nalCount -= currentGop.nalCount; // Extend the first frame of what is now the
9473 // first gop to cover the time period of the
9474 // frames we just removed
9475
9476 gops[0][0].dts = currentGop.dts;
9477 gops[0][0].pts = currentGop.pts;
9478 gops[0][0].duration += currentGop.duration;
9479 }
9480
9481 return gops;
9482 };
9483 /**
9484 * Default sample object
9485 * see ISO/IEC 14496-12:2012, section 8.6.4.3
9486 */
9487
9488
9489 var createDefaultSample = function createDefaultSample() {
9490 return {
9491 size: 0,
9492 flags: {
9493 isLeading: 0,
9494 dependsOn: 1,
9495 isDependedOn: 0,
9496 hasRedundancy: 0,
9497 degradationPriority: 0,
9498 isNonSyncSample: 1
9499 }
9500 };
9501 };
9502 /*
9503 * Collates information from a video frame into an object for eventual
9504 * entry into an MP4 sample table.
9505 *
9506 * @param {Object} frame the video frame
9507 * @param {Number} dataOffset the byte offset to position the sample
9508 * @return {Object} object containing sample table info for a frame
9509 */
9510
9511
9512 var sampleForFrame = function sampleForFrame(frame, dataOffset) {
9513 var sample = createDefaultSample();
9514 sample.dataOffset = dataOffset;
9515 sample.compositionTimeOffset = frame.pts - frame.dts;
9516 sample.duration = frame.duration;
9517 sample.size = 4 * frame.length; // Space for nal unit size
9518
9519 sample.size += frame.byteLength;
9520
9521 if (frame.keyFrame) {
9522 sample.flags.dependsOn = 2;
9523 sample.flags.isNonSyncSample = 0;
9524 }
9525
9526 return sample;
9527 }; // generate the track's sample table from an array of gops
9528
9529
9530 var generateSampleTable$1 = function generateSampleTable(gops, baseDataOffset) {
9531 var h,
9532 i,
9533 sample,
9534 currentGop,
9535 currentFrame,
9536 dataOffset = baseDataOffset || 0,
9537 samples = [];
9538
9539 for (h = 0; h < gops.length; h++) {
9540 currentGop = gops[h];
9541
9542 for (i = 0; i < currentGop.length; i++) {
9543 currentFrame = currentGop[i];
9544 sample = sampleForFrame(currentFrame, dataOffset);
9545 dataOffset += sample.size;
9546 samples.push(sample);
9547 }
9548 }
9549
9550 return samples;
9551 }; // generate the track's raw mdat data from an array of gops
9552
9553
9554 var concatenateNalData = function concatenateNalData(gops) {
9555 var h,
9556 i,
9557 j,
9558 currentGop,
9559 currentFrame,
9560 currentNal,
9561 dataOffset = 0,
9562 nalsByteLength = gops.byteLength,
9563 numberOfNals = gops.nalCount,
9564 totalByteLength = nalsByteLength + 4 * numberOfNals,
9565 data = new Uint8Array(totalByteLength),
9566 view = new DataView(data.buffer); // For each Gop..
9567
9568 for (h = 0; h < gops.length; h++) {
9569 currentGop = gops[h]; // For each Frame..
9570
9571 for (i = 0; i < currentGop.length; i++) {
9572 currentFrame = currentGop[i]; // For each NAL..
9573
9574 for (j = 0; j < currentFrame.length; j++) {
9575 currentNal = currentFrame[j];
9576 view.setUint32(dataOffset, currentNal.data.byteLength);
9577 dataOffset += 4;
9578 data.set(currentNal.data, dataOffset);
9579 dataOffset += currentNal.data.byteLength;
9580 }
9581 }
9582 }
9583
9584 return data;
9585 }; // generate the track's sample table from a frame
9586
9587
9588 var generateSampleTableForFrame = function generateSampleTableForFrame(frame, baseDataOffset) {
9589 var sample,
9590 dataOffset = baseDataOffset || 0,
9591 samples = [];
9592 sample = sampleForFrame(frame, dataOffset);
9593 samples.push(sample);
9594 return samples;
9595 }; // generate the track's raw mdat data from a frame
9596
9597
9598 var concatenateNalDataForFrame = function concatenateNalDataForFrame(frame) {
9599 var i,
9600 currentNal,
9601 dataOffset = 0,
9602 nalsByteLength = frame.byteLength,
9603 numberOfNals = frame.length,
9604 totalByteLength = nalsByteLength + 4 * numberOfNals,
9605 data = new Uint8Array(totalByteLength),
9606 view = new DataView(data.buffer); // For each NAL..
9607
9608 for (i = 0; i < frame.length; i++) {
9609 currentNal = frame[i];
9610 view.setUint32(dataOffset, currentNal.data.byteLength);
9611 dataOffset += 4;
9612 data.set(currentNal.data, dataOffset);
9613 dataOffset += currentNal.data.byteLength;
9614 }
9615
9616 return data;
9617 };
9618
9619 var frameUtils = {
9620 groupNalsIntoFrames: groupNalsIntoFrames,
9621 groupFramesIntoGops: groupFramesIntoGops,
9622 extendFirstKeyFrame: extendFirstKeyFrame,
9623 generateSampleTable: generateSampleTable$1,
9624 concatenateNalData: concatenateNalData,
9625 generateSampleTableForFrame: generateSampleTableForFrame,
9626 concatenateNalDataForFrame: concatenateNalDataForFrame
9627 };
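 // Taken together these helpers form the video half of the transmux
 // pipeline. A hedged sketch of the intended call order (inputs come from
 // the H.264 parsing stream later in this bundle):
 //
 //   var frames = frameUtils.groupNalsIntoFrames(nalUnits);
 //   var gops = frameUtils.extendFirstKeyFrame(frameUtils.groupFramesIntoGops(frames));
 //   var samples = frameUtils.generateSampleTable(gops, 0);
 //   var mdatBytes = frameUtils.concatenateNalData(gops);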
9628 /**
9629 * mux.js
9630 *
9631 * Copyright (c) Brightcove
9632 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
9633 */
9634
9635 var highPrefix = [33, 16, 5, 32, 164, 27];
9636 var lowPrefix = [33, 65, 108, 84, 1, 2, 4, 8, 168, 2, 4, 8, 17, 191, 252];
9637
9638 var zeroFill = function zeroFill(count) {
9639 var a = [];
9640
9641 while (count--) {
9642 a.push(0);
9643 }
9644
9645 return a;
9646 };
9647
9648 var makeTable = function makeTable(metaTable) {
9649 return Object.keys(metaTable).reduce(function (obj, key) {
9650 obj[key] = new Uint8Array(metaTable[key].reduce(function (arr, part) {
9651 return arr.concat(part);
9652 }, []));
9653 return obj;
9654 }, {});
9655 };
9656
9657 var silence;
9658
9659 var silence_1 = function silence_1() {
9660 if (!silence) {
9661 // Frames-of-silence to use for filling in missing AAC frames
9662 var coneOfSilence = {
9663 96000: [highPrefix, [227, 64], zeroFill(154), [56]],
9664 88200: [highPrefix, [231], zeroFill(170), [56]],
9665 64000: [highPrefix, [248, 192], zeroFill(240), [56]],
9666 48000: [highPrefix, [255, 192], zeroFill(268), [55, 148, 128], zeroFill(54), [112]],
9667 44100: [highPrefix, [255, 192], zeroFill(268), [55, 163, 128], zeroFill(84), [112]],
9668 32000: [highPrefix, [255, 192], zeroFill(268), [55, 234], zeroFill(226), [112]],
9669 24000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 112], zeroFill(126), [224]],
9670 16000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 255], zeroFill(269), [223, 108], zeroFill(195), [1, 192]],
9671 12000: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 253, 128], zeroFill(259), [56]],
9672 11025: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 255, 192], zeroFill(268), [55, 175, 128], zeroFill(108), [112]],
9673 8000: [lowPrefix, zeroFill(268), [3, 121, 16], zeroFill(47), [7]]
9674 };
9675 silence = makeTable(coneOfSilence);
9676 }
9677
9678 return silence;
9679 };
9680 /**
9681 * mux.js
9682 *
9683 * Copyright (c) Brightcove
9684 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
9685 */
9686
9687
9688 var ONE_SECOND_IN_TS$4 = 90000,
9689 // 90kHz clock
9690 secondsToVideoTs,
9691 secondsToAudioTs,
9692 videoTsToSeconds,
9693 audioTsToSeconds,
9694 audioTsToVideoTs,
9695 videoTsToAudioTs,
9696 metadataTsToSeconds;
9697
9698 secondsToVideoTs = function secondsToVideoTs(seconds) {
9699 return seconds * ONE_SECOND_IN_TS$4;
9700 };
9701
9702 secondsToAudioTs = function secondsToAudioTs(seconds, sampleRate) {
9703 return seconds * sampleRate;
9704 };
9705
9706 videoTsToSeconds = function videoTsToSeconds(timestamp) {
9707 return timestamp / ONE_SECOND_IN_TS$4;
9708 };
9709
9710 audioTsToSeconds = function audioTsToSeconds(timestamp, sampleRate) {
9711 return timestamp / sampleRate;
9712 };
9713
9714 audioTsToVideoTs = function audioTsToVideoTs(timestamp, sampleRate) {
9715 return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
9716 };
9717
9718 videoTsToAudioTs = function videoTsToAudioTs(timestamp, sampleRate) {
9719 return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
9720 };
9721 /**
9722 * Adjust ID3 tag or caption timing information by the timeline pts values
9723 * (if keepOriginalTimestamps is false) and convert to seconds
9724 */
9725
9726
9727 metadataTsToSeconds = function metadataTsToSeconds(timestamp, timelineStartPts, keepOriginalTimestamps) {
9728 return videoTsToSeconds(keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts);
9729 };
9730
9731 var clock = {
9732 ONE_SECOND_IN_TS: ONE_SECOND_IN_TS$4,
9733 secondsToVideoTs: secondsToVideoTs,
9734 secondsToAudioTs: secondsToAudioTs,
9735 videoTsToSeconds: videoTsToSeconds,
9736 audioTsToSeconds: audioTsToSeconds,
9737 audioTsToVideoTs: audioTsToVideoTs,
9738 videoTsToAudioTs: videoTsToAudioTs,
9739 metadataTsToSeconds: metadataTsToSeconds
9740 };
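 // Quick worked conversions on the 90kHz clock (values illustrative):
 //   clock.secondsToVideoTs(1.5);          // => 135000 ticks
 //   clock.videoTsToSeconds(45000);        // => 0.5 seconds
 //   clock.audioTsToVideoTs(44100, 44100); // => 90000, one second of audio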
9741 /**
9742 * mux.js
9743 *
9744 * Copyright (c) Brightcove
9745 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
9746 */
9747
9748 /**
9749 * Sum the `byteLength` properties of the data in each AAC frame
9750 */
9751
9752 var sumFrameByteLengths = function sumFrameByteLengths(array) {
9753 var i,
9754 currentObj,
9755 sum = 0; // sum the byteLength of each frame's data
9756
9757 for (i = 0; i < array.length; i++) {
9758 currentObj = array[i];
9759 sum += currentObj.data.byteLength;
9760 }
9761
9762 return sum;
9763 }; // Possibly pad (prefix) the audio track with silence if appending this track
9764 // would lead to the introduction of a gap in the audio buffer
9765
9766
9767 var prefixWithSilence = function prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime) {
9768 var baseMediaDecodeTimeTs,
9769 frameDuration = 0,
9770 audioGapDuration = 0,
9771 audioFillFrameCount = 0,
9772 audioFillDuration = 0,
9773 silentFrame,
9774 i,
9775 firstFrame;
9776
9777 if (!frames.length) {
9778 return;
9779 }
9780
9781 baseMediaDecodeTimeTs = clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate); // determine frame clock duration based on sample rate, round up to avoid overfills
9782
9783 frameDuration = Math.ceil(clock.ONE_SECOND_IN_TS / (track.samplerate / 1024));
9784
9785 if (audioAppendStartTs && videoBaseMediaDecodeTime) {
9786 // insert the shortest possible amount (audio gap or audio to video gap)
9787 audioGapDuration = baseMediaDecodeTimeTs - Math.max(audioAppendStartTs, videoBaseMediaDecodeTime); // number of full frames in the audio gap
9788
9789 audioFillFrameCount = Math.floor(audioGapDuration / frameDuration);
9790 audioFillDuration = audioFillFrameCount * frameDuration;
9791 } // don't attempt to fill gaps smaller than a single frame or larger
9792 // than a half second
9793
9794
9795 if (audioFillFrameCount < 1 || audioFillDuration > clock.ONE_SECOND_IN_TS / 2) {
9796 return;
9797 }
9798
9799 silentFrame = silence_1()[track.samplerate];
9800
9801 if (!silentFrame) {
9802 // we don't have a silent frame pregenerated for the sample rate, so use a frame
9803 // from the content instead
9804 silentFrame = frames[0].data;
9805 }
9806
9807 for (i = 0; i < audioFillFrameCount; i++) {
9808 firstFrame = frames[0];
9809 frames.splice(0, 0, {
9810 data: silentFrame,
9811 dts: firstFrame.dts - frameDuration,
9812 pts: firstFrame.pts - frameDuration
9813 });
9814 }
9815
9816 track.baseMediaDecodeTime -= Math.floor(clock.videoTsToAudioTs(audioFillDuration, track.samplerate));
9817 return audioFillDuration;
9818 }; // If the audio segment extends before the earliest allowed DTS
9819 // value, remove AAC frames until it starts at or after the earliest
9820 // allowed DTS so that we don't end up with a negative
9821 // baseMediaDecodeTime for the audio track
9822
9823
9824 var trimAdtsFramesByEarliestDts = function trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts) {
9825 if (track.minSegmentDts >= earliestAllowedDts) {
9826 return adtsFrames;
9827 } // We will need to recalculate the earliest segment DTS
9828
9829
9830 track.minSegmentDts = Infinity;
9831 return adtsFrames.filter(function (currentFrame) {
9832 // If this is an allowed frame, keep it and record its DTS
9833 if (currentFrame.dts >= earliestAllowedDts) {
9834 track.minSegmentDts = Math.min(track.minSegmentDts, currentFrame.dts);
9835 track.minSegmentPts = track.minSegmentDts;
9836 return true;
9837 } // Otherwise, discard it
9838
9839
9840 return false;
9841 });
9842 }; // generate the track's sample table from an array of frames
9843
9844
9845 var generateSampleTable = function generateSampleTable(frames) {
9846 var i,
9847 currentFrame,
9848 samples = [];
9849
9850 for (i = 0; i < frames.length; i++) {
9851 currentFrame = frames[i];
9852 samples.push({
9853 size: currentFrame.data.byteLength,
9854 duration: 1024 // For AAC audio, every sample (one AAC frame) spans 1024 audio samples
9855
9856 });
9857 }
9858
9859 return samples;
9860 }; // generate the track's raw mdat data from an array of frames
9861
9862
9863 var concatenateFrameData = function concatenateFrameData(frames) {
9864 var i,
9865 currentFrame,
9866 dataOffset = 0,
9867 data = new Uint8Array(sumFrameByteLengths(frames));
9868
9869 for (i = 0; i < frames.length; i++) {
9870 currentFrame = frames[i];
9871 data.set(currentFrame.data, dataOffset);
9872 dataOffset += currentFrame.data.byteLength;
9873 }
9874
9875 return data;
9876 };
9877
9878 var audioFrameUtils = {
9879 prefixWithSilence: prefixWithSilence,
9880 trimAdtsFramesByEarliestDts: trimAdtsFramesByEarliestDts,
9881 generateSampleTable: generateSampleTable,
9882 concatenateFrameData: concatenateFrameData
9883 };
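 // An example of the silence-fill arithmetic above (sample rate assumed):
 // at 44100Hz each AAC frame spans 1024 samples, so
 // frameDuration = Math.ceil(90000 / (44100 / 1024)) = 2090 ticks on the
 // 90kHz clock, and only gaps between one frame and half a second
 // (45000 ticks) wide are filled.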
9884 /**
9885 * mux.js
9886 *
9887 * Copyright (c) Brightcove
9888 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
9889 */
9890
9891 var ONE_SECOND_IN_TS$3 = clock.ONE_SECOND_IN_TS;
9892 /**
9893 * Store information about the start and end of the track and the
9894 * duration for each frame/sample we process in order to calculate
9895 * the baseMediaDecodeTime
9896 */
9897
9898 var collectDtsInfo = function collectDtsInfo(track, data) {
9899 if (typeof data.pts === 'number') {
9900 if (track.timelineStartInfo.pts === undefined) {
9901 track.timelineStartInfo.pts = data.pts;
9902 }
9903
9904 if (track.minSegmentPts === undefined) {
9905 track.minSegmentPts = data.pts;
9906 } else {
9907 track.minSegmentPts = Math.min(track.minSegmentPts, data.pts);
9908 }
9909
9910 if (track.maxSegmentPts === undefined) {
9911 track.maxSegmentPts = data.pts;
9912 } else {
9913 track.maxSegmentPts = Math.max(track.maxSegmentPts, data.pts);
9914 }
9915 }
9916
9917 if (typeof data.dts === 'number') {
9918 if (track.timelineStartInfo.dts === undefined) {
9919 track.timelineStartInfo.dts = data.dts;
9920 }
9921
9922 if (track.minSegmentDts === undefined) {
9923 track.minSegmentDts = data.dts;
9924 } else {
9925 track.minSegmentDts = Math.min(track.minSegmentDts, data.dts);
9926 }
9927
9928 if (track.maxSegmentDts === undefined) {
9929 track.maxSegmentDts = data.dts;
9930 } else {
9931 track.maxSegmentDts = Math.max(track.maxSegmentDts, data.dts);
9932 }
9933 }
9934 };
9935 /**
9936 * Clear values used to calculate the baseMediaDecodeTime between
9937 * tracks
9938 */
9939
9940
9941 var clearDtsInfo = function clearDtsInfo(track) {
9942 delete track.minSegmentDts;
9943 delete track.maxSegmentDts;
9944 delete track.minSegmentPts;
9945 delete track.maxSegmentPts;
9946 };
9947 /**
9948 * Calculate the track's baseMediaDecodeTime based on the earliest
9949 * DTS the transmuxer has ever seen and the minimum DTS for the
9950 * current track
9951 * @param track {object} track metadata configuration
9952 * @param keepOriginalTimestamps {boolean} If true, keep the timestamps
9953 * in the source; false to adjust the first segment to start at 0.
9954 */
9955
9956
9957 var calculateTrackBaseMediaDecodeTime = function calculateTrackBaseMediaDecodeTime(track, keepOriginalTimestamps) {
9958 var baseMediaDecodeTime,
9959 scale,
9960 minSegmentDts = track.minSegmentDts; // Optionally adjust the time so the first segment starts at zero.
9961
9962 if (!keepOriginalTimestamps) {
9963 minSegmentDts -= track.timelineStartInfo.dts;
9964 } // track.timelineStartInfo.baseMediaDecodeTime is the location, in time, where
9965 // we want the start of the first segment to be placed
9966
9967
9968 baseMediaDecodeTime = track.timelineStartInfo.baseMediaDecodeTime; // Add to that the distance this segment is from the very first
9969
9970 baseMediaDecodeTime += minSegmentDts; // baseMediaDecodeTime must not become negative
9971
9972 baseMediaDecodeTime = Math.max(0, baseMediaDecodeTime);
9973
9974 if (track.type === 'audio') {
9975 // Audio has a different clock equal to the sampling_rate so we need to
9976 // scale the PTS values into the clock rate of the track
9977 scale = track.samplerate / ONE_SECOND_IN_TS$3;
9978 baseMediaDecodeTime *= scale;
9979 baseMediaDecodeTime = Math.floor(baseMediaDecodeTime);
9980 }
9981
9982 return baseMediaDecodeTime;
9983 };
9984
9985 var trackDecodeInfo = {
9986 clearDtsInfo: clearDtsInfo,
9987 calculateTrackBaseMediaDecodeTime: calculateTrackBaseMediaDecodeTime,
9988 collectDtsInfo: collectDtsInfo
9989 };
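 // A worked example of calculateTrackBaseMediaDecodeTime (numbers assumed):
 // with keepOriginalTimestamps false, minSegmentDts = 990000,
 // timelineStartInfo.dts = 900000 and timelineStartInfo.baseMediaDecodeTime = 0,
 // a video track yields 90000 (one second on the 90kHz clock); an audio
 // track at 48kHz scales that to Math.floor(90000 * 48000 / 90000) = 48000.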
9990 /**
9991 * mux.js
9992 *
9993 * Copyright (c) Brightcove
9994 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
9995 *
9996 * Reads in-band caption information from a video elementary
9997 * stream. Captions must follow the CEA-708 standard for injection
9998 * into an MPEG-2 transport stream.
9999 * @see https://en.wikipedia.org/wiki/CEA-708
10000 * @see https://www.gpo.gov/fdsys/pkg/CFR-2007-title47-vol1/pdf/CFR-2007-title47-vol1-sec15-119.pdf
10001 */
10002 // Supplemental enhancement information (SEI) NAL units have a payload
10003 // type field to indicate how they are to be interpreted. CEA-708 caption
10004 // content is always transmitted with payload type 0x04.
10005
10006 var USER_DATA_REGISTERED_ITU_T_T35 = 4,
10007 RBSP_TRAILING_BITS = 128;
10008 /**
10009 * Parse a supplemental enhancement information (SEI) NAL unit.
10010 * Stops parsing once a message of type ITU T T35 has been found.
10011 *
10012 * @param bytes {Uint8Array} the bytes of a SEI NAL unit
10013 * @return {object} the parsed SEI payload
10014 * @see Rec. ITU-T H.264, 7.3.2.3.1
10015 */
10016
10017 var parseSei = function parseSei(bytes) {
10018 var i = 0,
10019 result = {
10020 payloadType: -1,
10021 payloadSize: 0
10022 },
10023 payloadType = 0,
10024 payloadSize = 0; // go through the sei_rbsp parsing each individual sei_message
10025
10026 while (i < bytes.byteLength) {
10027 // stop once we have hit the end of the sei_rbsp
10028 if (bytes[i] === RBSP_TRAILING_BITS) {
10029 break;
10030 } // Parse payload type
10031
10032
10033 while (bytes[i] === 0xFF) {
10034 payloadType += 255;
10035 i++;
10036 }
10037
10038 payloadType += bytes[i++]; // Parse payload size
10039
10040 while (bytes[i] === 0xFF) {
10041 payloadSize += 255;
10042 i++;
10043 }
10044
10045 payloadSize += bytes[i++]; // this sei_message is a 608/708 caption so save it and break
10046 // there can only ever be one caption message in a frame's sei
10047
10048 if (!result.payload && payloadType === USER_DATA_REGISTERED_ITU_T_T35) {
10049 var userIdentifier = String.fromCharCode(bytes[i + 3], bytes[i + 4], bytes[i + 5], bytes[i + 6]);
10050
10051 if (userIdentifier === 'GA94') {
10052 result.payloadType = payloadType;
10053 result.payloadSize = payloadSize;
10054 result.payload = bytes.subarray(i, i + payloadSize);
10055 break;
10056 } else {
10057 result.payload = void 0;
10058 }
10059 } // skip the payload and parse the next message
10060
10061
10062 i += payloadSize;
10063 payloadType = 0;
10064 payloadSize = 0;
10065 }
10066
10067 return result;
10068 }; // see ANSI/SCTE 128-1 (2013), section 8.1
10069
10070
10071 var parseUserData = function parseUserData(sei) {
10072 // itu_t_t35_country_code must be 181 (United States) for
10073 // captions
10074 if (sei.payload[0] !== 181) {
10075 return null;
10076 } // itu_t_t35_provider_code should be 49 (ATSC) for captions
10077
10078
10079 if ((sei.payload[1] << 8 | sei.payload[2]) !== 49) {
10080 return null;
10081 } // the user_identifier should be "GA94" to indicate ATSC1 data
10082
10083
10084 if (String.fromCharCode(sei.payload[3], sei.payload[4], sei.payload[5], sei.payload[6]) !== 'GA94') {
10085 return null;
10086 } // finally, user_data_type_code should be 0x03 for caption data
10087
10088
10089 if (sei.payload[7] !== 0x03) {
10090 return null;
10091 } // return the user_data_type_structure and strip the trailing
10092 // marker bits
10093
10094
10095 return sei.payload.subarray(8, sei.payload.length - 1);
10096 }; // see CEA-708-D, section 4.4
10097
10098
10099 var parseCaptionPackets = function parseCaptionPackets(pts, userData) {
10100 var results = [],
10101 i,
10102 count,
10103 offset,
10104 data; // if this is just filler, return immediately
10105
10106 if (!(userData[0] & 0x40)) {
10107 return results;
10108 } // parse out the cc_data_1 and cc_data_2 fields
10109
10110
10111 count = userData[0] & 0x1f;
10112
10113 for (i = 0; i < count; i++) {
10114 offset = i * 3;
10115 data = {
10116 type: userData[offset + 2] & 0x03,
10117 pts: pts
10118 }; // capture cc data when cc_valid is 1
10119
10120 if (userData[offset + 2] & 0x04) {
10121 data.ccData = userData[offset + 3] << 8 | userData[offset + 4];
10122 results.push(data);
10123 }
10124 }
10125
10126 return results;
10127 };
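 // Sketch of the cc_data layout consumed above (bytes illustrative):
 // userData[0] holds process_cc_data_flag in bit 6 and cc_count in its low
 // five bits, and each packet is three bytes starting at userData[offset + 2]:
 //   [marker | cc_valid (0x04) | cc_type (0x03), cc_data_1, cc_data_2]
 // e.g. a leading byte of 0xFC is a valid, type-0 (CEA-608 field 1) pair
 // whose ccData word becomes (cc_data_1 << 8) | cc_data_2.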
10128
10129 var discardEmulationPreventionBytes$1 = function discardEmulationPreventionBytes(data) {
10130 var length = data.byteLength,
10131 emulationPreventionBytesPositions = [],
10132 i = 1,
10133 newLength,
10134 newData; // Find all `Emulation Prevention Bytes`
10135
10136 while (i < length - 2) {
10137 if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
10138 emulationPreventionBytesPositions.push(i + 2);
10139 i += 2;
10140 } else {
10141 i++;
10142 }
10143 } // If no Emulation Prevention Bytes were found just return the original
10144 // array
10145
10146
10147 if (emulationPreventionBytesPositions.length === 0) {
10148 return data;
10149 } // Create a new array to hold the NAL unit data
10150
10151
10152 newLength = length - emulationPreventionBytesPositions.length;
10153 newData = new Uint8Array(newLength);
10154 var sourceIndex = 0;
10155
10156 for (i = 0; i < newLength; sourceIndex++, i++) {
10157 if (sourceIndex === emulationPreventionBytesPositions[0]) {
10158 // Skip this byte
10159 sourceIndex++; // Remove this position index
10160
10161 emulationPreventionBytesPositions.shift();
10162 }
10163
10164 newData[i] = data[sourceIndex];
10165 }
10166
10167 return newData;
10168 }; // exports
10169
10170
10171 var captionPacketParser = {
10172 parseSei: parseSei,
10173 parseUserData: parseUserData,
10174 parseCaptionPackets: parseCaptionPackets,
10175 discardEmulationPreventionBytes: discardEmulationPreventionBytes$1,
10176 USER_DATA_REGISTERED_ITU_T_T35: USER_DATA_REGISTERED_ITU_T_T35
10177 }; // Link To Transport
10178 // -----------------
10179
10180 var CaptionStream$1 = function CaptionStream(options) {
10181 options = options || {};
10182 CaptionStream.prototype.init.call(this); // parse708captions flag, default to true
10183
10184 this.parse708captions_ = typeof options.parse708captions === 'boolean' ? options.parse708captions : true;
10185 this.captionPackets_ = [];
10186 this.ccStreams_ = [new Cea608Stream(0, 0), // eslint-disable-line no-use-before-define
10187 new Cea608Stream(0, 1), // eslint-disable-line no-use-before-define
10188 new Cea608Stream(1, 0), // eslint-disable-line no-use-before-define
10189 new Cea608Stream(1, 1) // eslint-disable-line no-use-before-define
10190 ];
10191
10192 if (this.parse708captions_) {
10193 this.cc708Stream_ = new Cea708Stream(); // eslint-disable-line no-use-before-define
10194 }
10195
10196 this.reset(); // forward data and done events from CCs to this CaptionStream
10197
10198 this.ccStreams_.forEach(function (cc) {
10199 cc.on('data', this.trigger.bind(this, 'data'));
10200 cc.on('partialdone', this.trigger.bind(this, 'partialdone'));
10201 cc.on('done', this.trigger.bind(this, 'done'));
10202 }, this);
10203
10204 if (this.parse708captions_) {
10205 this.cc708Stream_.on('data', this.trigger.bind(this, 'data'));
10206 this.cc708Stream_.on('partialdone', this.trigger.bind(this, 'partialdone'));
10207 this.cc708Stream_.on('done', this.trigger.bind(this, 'done'));
10208 }
10209 };
10210
10211 CaptionStream$1.prototype = new stream();
10212
10213 CaptionStream$1.prototype.push = function (event) {
10214 var sei, userData, newCaptionPackets; // only examine SEI NALs
10215
10216 if (event.nalUnitType !== 'sei_rbsp') {
10217 return;
10218 } // parse the sei
10219
10220
10221 sei = captionPacketParser.parseSei(event.escapedRBSP); // no payload data, skip
10222
10223 if (!sei.payload) {
10224 return;
10225 } // ignore everything but user_data_registered_itu_t_t35
10226
10227
10228 if (sei.payloadType !== captionPacketParser.USER_DATA_REGISTERED_ITU_T_T35) {
10229 return;
10230 } // parse out the user data payload
10231
10232
10233 userData = captionPacketParser.parseUserData(sei); // ignore unrecognized userData
10234
10235 if (!userData) {
10236 return;
10237 } // Sometimes, the same segment # will be downloaded twice. To stop the
10238 // caption data from being processed twice, we track the latest dts we've
10239 // received and ignore everything with a dts before that. However, since
10240 // data for a specific dts can be split across packets on either side of
10241 // a segment boundary, we need to make sure we *don't* ignore the packets
10242 // from the *next* segment that have dts === this.latestDts_. By constantly
10243 // tracking the number of packets received with dts === this.latestDts_, we
10244 // know how many should be ignored once we start receiving duplicates.
10245
10246
10247 if (event.dts < this.latestDts_) {
10248 // We've started getting older data, so set the flag.
10249 this.ignoreNextEqualDts_ = true;
10250 return;
10251 } else if (event.dts === this.latestDts_ && this.ignoreNextEqualDts_) {
10252 this.numSameDts_--;
10253
10254 if (!this.numSameDts_) {
10255 // We've received the last duplicate packet, time to start processing again
10256 this.ignoreNextEqualDts_ = false;
10257 }
10258
10259 return;
10260 } // parse out CC data packets and save them for later
10261
10262
10263 newCaptionPackets = captionPacketParser.parseCaptionPackets(event.pts, userData);
10264 this.captionPackets_ = this.captionPackets_.concat(newCaptionPackets);
10265
10266 if (this.latestDts_ !== event.dts) {
10267 this.numSameDts_ = 0;
10268 }
10269
10270 this.numSameDts_++;
10271 this.latestDts_ = event.dts;
10272 };
10273
10274 CaptionStream$1.prototype.flushCCStreams = function (flushType) {
10275 this.ccStreams_.forEach(function (cc) {
10276 return flushType === 'flush' ? cc.flush() : cc.partialFlush();
10277 }, this);
10278 };
10279
10280 CaptionStream$1.prototype.flushStream = function (flushType) {
10281 // make sure we actually parsed captions before proceeding
10282 if (!this.captionPackets_.length) {
10283 this.flushCCStreams(flushType);
10284 return;
10285 } // In Chrome, the Array#sort function is not stable, so add a
10286 // presortIndex that we can use to ensure we get a stable sort
10287
10288
10289 this.captionPackets_.forEach(function (elem, idx) {
10290 elem.presortIndex = idx;
10291 }); // sort caption byte-pairs based on their PTS values
10292
10293 this.captionPackets_.sort(function (a, b) {
10294 if (a.pts === b.pts) {
10295 return a.presortIndex - b.presortIndex;
10296 }
10297
10298 return a.pts - b.pts;
10299 });
10300 this.captionPackets_.forEach(function (packet) {
10301 if (packet.type < 2) {
10302 // Dispatch packet to the right Cea608Stream
10303 this.dispatchCea608Packet(packet);
10304 } else {
10305 // Dispatch packet to the Cea708Stream
10306 this.dispatchCea708Packet(packet);
10307 }
10308 }, this);
10309 this.captionPackets_.length = 0;
10310 this.flushCCStreams(flushType);
10311 };
10312
10313 CaptionStream$1.prototype.flush = function () {
10314 return this.flushStream('flush');
10315 }; // Only called if handling partial data
10316
10317
10318 CaptionStream$1.prototype.partialFlush = function () {
10319 return this.flushStream('partialFlush');
10320 };
10321
10322 CaptionStream$1.prototype.reset = function () {
10323 this.latestDts_ = null;
10324 this.ignoreNextEqualDts_ = false;
10325 this.numSameDts_ = 0;
10326 this.activeCea608Channel_ = [null, null];
10327 this.ccStreams_.forEach(function (ccStream) {
10328 ccStream.reset();
10329 });
10330 }; // From the CEA-608 spec:
10331
10332 /*
10333 * When XDS sub-packets are interleaved with other services, the end of each sub-packet shall be followed
10334 * by a control pair to change to a different service. When any of the control codes from 0x10 to 0x1F is
10335 * used to begin a control code pair, it indicates the return to captioning or Text data. The control code pair
10336 * and subsequent data should then be processed according to the FCC rules. It may be necessary for the
10337 * line 21 data encoder to automatically insert a control code pair (i.e. RCL, RU2, RU3, RU4, RDC, or RTD)
10338 * to switch to captioning or Text.
10339 */
10340 // With that in mind, we ignore any data between an XDS control code and a
10341 // subsequent closed-captioning control code.
10342
10343
10344 CaptionStream$1.prototype.dispatchCea608Packet = function (packet) {
10345 // NOTE: packet.type is the CEA608 field
10346 if (this.setsTextOrXDSActive(packet)) {
10347 this.activeCea608Channel_[packet.type] = null;
10348 } else if (this.setsChannel1Active(packet)) {
10349 this.activeCea608Channel_[packet.type] = 0;
10350 } else if (this.setsChannel2Active(packet)) {
10351 this.activeCea608Channel_[packet.type] = 1;
10352 }
10353
10354 if (this.activeCea608Channel_[packet.type] === null) {
10355 // If we haven't received anything to set the active channel, or the
10356 // packets are Text/XDS data, discard the data; we don't want jumbled
10357 // captions
10358 return;
10359 }
10360
10361 this.ccStreams_[(packet.type << 1) + this.activeCea608Channel_[packet.type]].push(packet);
10362 };
10363
10364 CaptionStream$1.prototype.setsChannel1Active = function (packet) {
10365 return (packet.ccData & 0x7800) === 0x1000;
10366 };
10367
10368 CaptionStream$1.prototype.setsChannel2Active = function (packet) {
10369 return (packet.ccData & 0x7800) === 0x1800;
10370 };
10371
10372 CaptionStream$1.prototype.setsTextOrXDSActive = function (packet) {
10373 return (packet.ccData & 0x7100) === 0x0100 || (packet.ccData & 0x78fe) === 0x102a || (packet.ccData & 0x78fe) === 0x182a;
10374 };
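 // A worked mask example (control codes from CEA-608): the RCL pair 0x1420
 // activates channel 1 because 0x1420 & 0x7800 === 0x1000, while its
 // channel-2 twin 0x1C20 gives 0x1C20 & 0x7800 === 0x1800.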
10375
10376 CaptionStream$1.prototype.dispatchCea708Packet = function (packet) {
10377 if (this.parse708captions_) {
10378 this.cc708Stream_.push(packet);
10379 }
10380 }; // ----------------------
10381 // Session to Application
10382 // ----------------------
10383 // This hash maps special and extended character codes to their
10384 // proper Unicode equivalent. The first one-byte key is just a
10385 // non-standard character code. The two-byte keys that follow are
10386 // the extended CEA708 character codes, along with the preceding
10387 // 0x10 extended character byte to distinguish these codes from
10388 // non-extended character codes. Every CEA708 character code that
10389 // is not in this object maps directly to a standard unicode
10390 // character code.
10391 // The transparent space and non-breaking transparent space are
10392 // technically not fully supported since there is no code to
10393 // make them transparent, so they have normal non-transparent
10394 // stand-ins.
10395 // The special closed caption (CC) character isn't a standard
10396 // unicode character, so a fairly similar unicode character was
10397 // chosen in its place.
10398
10399
10400 var CHARACTER_TRANSLATION_708 = {
10401 0x7f: 0x266a,
10402 // ♪
10403 0x1020: 0x20,
10404 // Transparent Space
10405 0x1021: 0xa0,
10406 // Non-breaking Transparent Space
10407 0x1025: 0x2026,
10408 // …
10409 0x102a: 0x0160,
10410 // Š
10411 0x102c: 0x0152,
10412 // Œ
10413 0x1030: 0x2588,
10414 // █
10415 0x1031: 0x2018,
10416 // ‘
10417 0x1032: 0x2019,
10418 // ’
10419 0x1033: 0x201c,
10420 // “
10421 0x1034: 0x201d,
10422 // ”
10423 0x1035: 0x2022,
10424 // •
10425 0x1039: 0x2122,
10426 // ™
10427 0x103a: 0x0161,
10428 // š
10429 0x103c: 0x0153,
10430 // œ
10431 0x103d: 0x2120,
10432 // ℠
10433 0x103f: 0x0178,
10434 // Ÿ
10435 0x1076: 0x215b,
10436 // ⅛
10437 0x1077: 0x215c,
10438 // ⅜
10439 0x1078: 0x215d,
10440 // ⅝
10441 0x1079: 0x215e,
10442 // ⅞
10443 0x107a: 0x23d0,
10444 // ⏐
10445 0x107b: 0x23a4,
10446 // ⎤
10447 0x107c: 0x23a3,
10448 // ⎣
10449 0x107d: 0x23af,
10450 // ⎯
10451 0x107e: 0x23a6,
10452 // ⎦
10453 0x107f: 0x23a1,
10454 // ⎡
10455 0x10a0: 0x3138 // ㄸ (CC char)
10456
10457 };
10458
10459 var get708CharFromCode = function get708CharFromCode(code) {
10460 var newCode = CHARACTER_TRANSLATION_708[code] || code;
10461
10462 if (code & 0x1000 && code === newCode) {
10463 // Invalid extended code
10464 return '';
10465 }
10466
10467 return String.fromCharCode(newCode);
10468 };
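 // Example (illustrative, not executed): plain codes pass straight through,
 // mapped codes translate, and unmapped extended codes are dropped:
 //   get708CharFromCode(0x41)   // -> 'A'  (no table entry, extended bit unset)
 //   get708CharFromCode(0x1025) // -> '…'  (maps to 0x2026)
 //   get708CharFromCode(0x1041) // -> ''   (extended bit set, no mapping)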
10469
10470 var within708TextBlock = function within708TextBlock(b) {
10471 return 0x20 <= b && b <= 0x7f || 0xa0 <= b && b <= 0xff;
10472 };
10473
10474 var Cea708Window = function Cea708Window(windowNum) {
10475 this.windowNum = windowNum;
10476 this.reset();
10477 };
10478
10479 Cea708Window.prototype.reset = function () {
10480 this.clearText();
10481 this.pendingNewLine = false;
10482 this.winAttr = {};
10483 this.penAttr = {};
10484 this.penLoc = {};
10485 this.penColor = {}; // These default values are arbitrary,
10486 // defineWindow will usually override them
10487
10488 this.visible = 0;
10489 this.rowLock = 0;
10490 this.columnLock = 0;
10491 this.priority = 0;
10492 this.relativePositioning = 0;
10493 this.anchorVertical = 0;
10494 this.anchorHorizontal = 0;
10495 this.anchorPoint = 0;
10496 this.rowCount = 1;
10497 this.virtualRowCount = this.rowCount + 1;
10498 this.columnCount = 41;
10499 this.windowStyle = 0;
10500 this.penStyle = 0;
10501 };
10502
10503 Cea708Window.prototype.getText = function () {
10504 return this.rows.join('\n');
10505 };
10506
10507 Cea708Window.prototype.clearText = function () {
10508 this.rows = [''];
10509 this.rowIdx = 0;
10510 };
10511
10512 Cea708Window.prototype.newLine = function (pts) {
10513 if (this.rows.length >= this.virtualRowCount && typeof this.beforeRowOverflow === 'function') {
10514 this.beforeRowOverflow(pts);
10515 }
10516
10517 if (this.rows.length > 0) {
10518 this.rows.push('');
10519 this.rowIdx++;
10520 } // Show all virtual rows since there's no visible scrolling
10521
10522
10523 while (this.rows.length > this.virtualRowCount) {
10524 this.rows.shift();
10525 this.rowIdx--;
10526 }
10527 };
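 // Example (illustrative, not executed): with virtualRowCount = 2 and
 // rows = ['first', 'second'], newLine(pts) first invokes beforeRowOverflow
 // (when a callback was installed by the service, flushing the displayed
 // caption), then appends a row and shifts the oldest one off, leaving
 // rows = ['second', ''] and rowIdx = 1.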
10528
10529 Cea708Window.prototype.isEmpty = function () {
10530 if (this.rows.length === 0) {
10531 return true;
10532 } else if (this.rows.length === 1) {
10533 return this.rows[0] === '';
10534 }
10535
10536 return false;
10537 };
10538
10539 Cea708Window.prototype.addText = function (text) {
10540 this.rows[this.rowIdx] += text;
10541 };
10542
10543 Cea708Window.prototype.backspace = function () {
10544 if (!this.isEmpty()) {
10545 var row = this.rows[this.rowIdx];
10546 this.rows[this.rowIdx] = row.substr(0, row.length - 1);
10547 }
10548 };
10549
10550 var Cea708Service = function Cea708Service(serviceNum) {
10551 this.serviceNum = serviceNum;
10552 this.text = '';
10553 this.currentWindow = new Cea708Window(-1);
10554 this.windows = [];
10555 };
10556 /**
10557 * Initialize service windows
10558 * Must be run before service use
10559 *
10560 * @param {Integer} pts PTS value
10561 * @param {Function} beforeRowOverflow Function to execute before row overflow of a window
10562 */
10563
10564
10565 Cea708Service.prototype.init = function (pts, beforeRowOverflow) {
10566 this.startPts = pts;
10567
10568 for (var win = 0; win < 8; win++) {
10569 this.windows[win] = new Cea708Window(win);
10570
10571 if (typeof beforeRowOverflow === 'function') {
10572 this.windows[win].beforeRowOverflow = beforeRowOverflow;
10573 }
10574 }
10575 };
10576 /**
10577 * Set current window of service to be affected by commands
10578 *
10579 * @param {Integer} windowNum Window number
10580 */
10581
10582
10583 Cea708Service.prototype.setCurrentWindow = function (windowNum) {
10584 this.currentWindow = this.windows[windowNum];
10585 };
10586
10587 var Cea708Stream = function Cea708Stream() {
10588 Cea708Stream.prototype.init.call(this);
10589 var self = this;
10590 this.current708Packet = null;
10591 this.services = {};
10592
10593 this.push = function (packet) {
10594 if (packet.type === 3) {
10595 // 708 packet start
10596 self.new708Packet();
10597 self.add708Bytes(packet);
10598 } else {
10599 if (self.current708Packet === null) {
10600 // This should only happen at the start of a file if there's no packet start.
10601 self.new708Packet();
10602 }
10603
10604 self.add708Bytes(packet);
10605 }
10606 };
10607 };
10608
10609 Cea708Stream.prototype = new stream();
10610 /**
10611 * Push current 708 packet, create new 708 packet.
10612 */
10613
10614 Cea708Stream.prototype.new708Packet = function () {
10615 if (this.current708Packet !== null) {
10616 this.push708Packet();
10617 }
10618
10619 this.current708Packet = {
10620 data: [],
10621 ptsVals: []
10622 };
10623 };
10624 /**
10625 * Add pts and both bytes from packet into current 708 packet.
10626 */
10627
10628
10629 Cea708Stream.prototype.add708Bytes = function (packet) {
10630 var data = packet.ccData;
10631 var byte0 = data >>> 8;
10632 var byte1 = data & 0xff; // I would just keep a list of packets instead of bytes, but it isn't clear in the spec
10633 // that service blocks will always line up with byte pairs.
10634
10635 this.current708Packet.ptsVals.push(packet.pts);
10636 this.current708Packet.data.push(byte0);
10637 this.current708Packet.data.push(byte1);
10638 };
10639 /**
10640 * Parse completed 708 packet into service blocks and push each service block.
10641 */
10642
10643
10644 Cea708Stream.prototype.push708Packet = function () {
10645 var packet708 = this.current708Packet;
10646 var packetData = packet708.data;
10647 var serviceNum = null;
10648 var blockSize = null;
10649 var i = 0;
10650 var b = packetData[i++];
10651 packet708.seq = b >> 6;
10652 packet708.sizeCode = b & 0x3f; // 0b00111111;
10653
10654 for (; i < packetData.length; i++) {
10655 b = packetData[i++];
10656 serviceNum = b >> 5;
10657 blockSize = b & 0x1f; // 0b00011111
10658
10659 if (serviceNum === 7 && blockSize > 0) {
10660 // Extended service num
10661 b = packetData[i++];
10662 serviceNum = b;
10663 }
10664
10665 this.pushServiceBlock(serviceNum, i, blockSize);
10666
10667 if (blockSize > 0) {
10668 i += blockSize - 1;
10669 }
10670 }
10671 };
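 // Example (illustrative, not executed) of the layout parsed above: a packet
 // starting [0x42, 0x23, ...] has seq = 0x42 >> 6 = 1 and
 // sizeCode = 0x42 & 0x3f = 2; the service-block header 0x23 yields
 // serviceNum = 0x23 >> 5 = 1 and blockSize = 0x23 & 0x1f = 3, so the next
 // three bytes are handed to pushServiceBlock for service 1.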
10672 /**
10673 * Parse service block, execute commands, read text.
10674 *
10675 * Note: While many of these commands serve important purposes,
10676 * many others just parse out the parameters or attributes, but
10677 * nothing is done with them because this is not a full and complete
10678 * implementation of the entire 708 spec.
10679 *
10680 * @param {Integer} serviceNum Service number
10681 * @param {Integer} start Start index of the 708 packet data
10682 * @param {Integer} size Block size
10683 */
10684
10685
10686 Cea708Stream.prototype.pushServiceBlock = function (serviceNum, start, size) {
10687 var b;
10688 var i = start;
10689 var packetData = this.current708Packet.data;
10690 var service = this.services[serviceNum];
10691
10692 if (!service) {
10693 service = this.initService(serviceNum, i);
10694 }
10695
10696 for (; i < start + size && i < packetData.length; i++) {
10697 b = packetData[i];
10698
10699 if (within708TextBlock(b)) {
10700 i = this.handleText(i, service);
10701 } else if (b === 0x10) {
10702 i = this.extendedCommands(i, service);
10703 } else if (0x80 <= b && b <= 0x87) {
10704 i = this.setCurrentWindow(i, service);
10705 } else if (0x98 <= b && b <= 0x9f) {
10706 i = this.defineWindow(i, service);
10707 } else if (b === 0x88) {
10708 i = this.clearWindows(i, service);
10709 } else if (b === 0x8c) {
10710 i = this.deleteWindows(i, service);
10711 } else if (b === 0x89) {
10712 i = this.displayWindows(i, service);
10713 } else if (b === 0x8a) {
10714 i = this.hideWindows(i, service);
10715 } else if (b === 0x8b) {
10716 i = this.toggleWindows(i, service);
10717 } else if (b === 0x97) {
10718 i = this.setWindowAttributes(i, service);
10719 } else if (b === 0x90) {
10720 i = this.setPenAttributes(i, service);
10721 } else if (b === 0x91) {
10722 i = this.setPenColor(i, service);
10723 } else if (b === 0x92) {
10724 i = this.setPenLocation(i, service);
10725 } else if (b === 0x8f) {
10726 service = this.reset(i, service);
10727 } else if (b === 0x08) {
10728 // BS: Backspace
10729 service.currentWindow.backspace();
10730 } else if (b === 0x0c) {
10731 // FF: Form feed
10732 service.currentWindow.clearText();
10733 } else if (b === 0x0d) {
10734 // CR: Carriage return
10735 service.currentWindow.pendingNewLine = true;
10736 } else if (b === 0x0e) {
10737 // HCR: Horizontal carriage return
10738 service.currentWindow.clearText();
10739 } else if (b === 0x8d) {
10740 // DLY: Delay, nothing to do
10741 i++;
10742 } // unrecognized command bytes fall through: nothing to do
10743 }
10744 };
10745 /**
10746 * Execute an extended command
10747 *
10748 * @param {Integer} i Current index in the 708 packet
10749 * @param {Service} service The service object to be affected
10750 * @return {Integer} New index after parsing
10751 */
10752
10753
10754 Cea708Stream.prototype.extendedCommands = function (i, service) {
10755 var packetData = this.current708Packet.data;
10756 var b = packetData[++i];
10757
10758 if (within708TextBlock(b)) {
10759 i = this.handleText(i, service, true);
10760 }
10761
10762 return i;
10763 };
10764 /**
10765 * Get PTS value of a given byte index
10766 *
10767 * @param {Integer} byteIndex Index of the byte
10768 * @return {Integer} PTS
10769 */
10770
10771
10772 Cea708Stream.prototype.getPts = function (byteIndex) {
10773 // There's 1 pts value per 2 bytes
10774 return this.current708Packet.ptsVals[Math.floor(byteIndex / 2)];
10775 };
10776 /**
10777 * Initializes a service
10778 *
10779 * @param {Integer} serviceNum Service number
10780 * @return {Service} Initialized service object
10781 */
10782
10783
10784 Cea708Stream.prototype.initService = function (serviceNum, i) {
10785 var self = this;
10786 this.services[serviceNum] = new Cea708Service(serviceNum);
10787 this.services[serviceNum].init(this.getPts(i), function (pts) {
10788 self.flushDisplayed(pts, self.services[serviceNum]);
10789 });
10790 return this.services[serviceNum];
10791 };
10792 /**
10793 * Execute text writing to current window
10794 *
10795 * @param {Integer} i Current index in the 708 packet
10796 * @param {Service} service The service object to be affected
10797 * @return {Integer} New index after parsing
10798 */
10799
10800
10801 Cea708Stream.prototype.handleText = function (i, service, isExtended) {
10802 var packetData = this.current708Packet.data;
10803 var b = packetData[i];
10804 var extended = isExtended ? 0x1000 : 0x0000;
10805 var char = get708CharFromCode(extended | b);
10806 var win = service.currentWindow;
10807
10808 if (win.pendingNewLine && !win.isEmpty()) {
10809 win.newLine(this.getPts(i));
10810 }
10811
10812 win.pendingNewLine = false;
10813 win.addText(char);
10814 return i;
10815 };
10816 /**
10817 * Parse and execute the CW# command.
10818 *
10819 * Set the current window.
10820 *
10821 * @param {Integer} i Current index in the 708 packet
10822 * @param {Service} service The service object to be affected
10823 * @return {Integer} New index after parsing
10824 */
10825
10826
10827 Cea708Stream.prototype.setCurrentWindow = function (i, service) {
10828 var packetData = this.current708Packet.data;
10829 var b = packetData[i];
10830 var windowNum = b & 0x07;
10831 service.setCurrentWindow(windowNum);
10832 return i;
10833 };
10834 /**
10835 * Parse and execute the DF# command.
10836 *
10837 * Define a window and set it as the current window.
10838 *
10839 * @param {Integer} i Current index in the 708 packet
10840 * @param {Service} service The service object to be affected
10841 * @return {Integer} New index after parsing
10842 */
10843
10844
10845 Cea708Stream.prototype.defineWindow = function (i, service) {
10846 var packetData = this.current708Packet.data;
10847 var b = packetData[i];
10848 var windowNum = b & 0x07;
10849 service.setCurrentWindow(windowNum);
10850 var win = service.currentWindow;
10851 b = packetData[++i];
10852 win.visible = (b & 0x20) >> 5; // v
10853
10854 win.rowLock = (b & 0x10) >> 4; // rl
10855
10856 win.columnLock = (b & 0x08) >> 3; // cl
10857
10858 win.priority = b & 0x07; // p
10859
10860 b = packetData[++i];
10861 win.relativePositioning = (b & 0x80) >> 7; // rp
10862
10863 win.anchorVertical = b & 0x7f; // av
10864
10865 b = packetData[++i];
10866 win.anchorHorizontal = b; // ah
10867
10868 b = packetData[++i];
10869 win.anchorPoint = (b & 0xf0) >> 4; // ap
10870
10871 win.rowCount = b & 0x0f; // rc
10872
10873 b = packetData[++i];
10874 win.columnCount = b & 0x3f; // cc
10875
10876 b = packetData[++i];
10877 win.windowStyle = (b & 0x38) >> 3; // ws
10878
10879 win.penStyle = b & 0x07; // ps
10880 // The spec says there are (rowCount+1) "virtual rows"
10881
10882 win.virtualRowCount = win.rowCount + 1;
10883 return i;
10884 };
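 // Example (illustrative, not executed): for the first parameter byte of a
 // DF# command, b = 0x2a (0b00101010) unpacks via the masks/shifts above to
 // visible = 1, rowLock = 0, columnLock = 1 and priority = 2; the remaining
 // parameter bytes are unpacked the same way, field by field.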
10885 /**
10886 * Parse and execute the SWA command.
10887 *
10888 * Set attributes of the current window.
10889 *
10890 * @param {Integer} i Current index in the 708 packet
10891 * @param {Service} service The service object to be affected
10892 * @return {Integer} New index after parsing
10893 */
10894
10895
10896 Cea708Stream.prototype.setWindowAttributes = function (i, service) {
10897 var packetData = this.current708Packet.data;
10898 var b = packetData[i];
10899 var winAttr = service.currentWindow.winAttr;
10900 b = packetData[++i];
10901 winAttr.fillOpacity = (b & 0xc0) >> 6; // fo
10902
10903 winAttr.fillRed = (b & 0x30) >> 4; // fr
10904
10905 winAttr.fillGreen = (b & 0x0c) >> 2; // fg
10906
10907 winAttr.fillBlue = b & 0x03; // fb
10908
10909 b = packetData[++i];
10910 winAttr.borderType = (b & 0xc0) >> 6; // bt
10911
10912 winAttr.borderRed = (b & 0x30) >> 4; // br
10913
10914 winAttr.borderGreen = (b & 0x0c) >> 2; // bg
10915
10916 winAttr.borderBlue = b & 0x03; // bb
10917
10918 b = packetData[++i];
10919 winAttr.borderType += (b & 0x80) >> 5; // bt
10920
10921 winAttr.wordWrap = (b & 0x40) >> 6; // ww
10922
10923 winAttr.printDirection = (b & 0x30) >> 4; // pd
10924
10925 winAttr.scrollDirection = (b & 0x0c) >> 2; // sd
10926
10927 winAttr.justify = b & 0x03; // j
10928
10929 b = packetData[++i];
10930 winAttr.effectSpeed = (b & 0xf0) >> 4; // es
10931
10932 winAttr.effectDirection = (b & 0x0c) >> 2; // ed
10933
10934 winAttr.displayEffect = b & 0x03; // de
10935
10936 return i;
10937 };
10938 /**
10939 * Gather text from all displayed windows and push a caption to output.
10940 *
10941 * @param {Integer} pts Current PTS value
10942 * @param {Service} service The service object to be affected
10943 */
10944
10945
10946 Cea708Stream.prototype.flushDisplayed = function (pts, service) {
10947 var displayedText = []; // TODO: Positioning not supported, displaying multiple windows will not necessarily
10948 // display text in the correct order, but sample files so far have not shown any issue.
10949
10950 for (var winId = 0; winId < 8; winId++) {
10951 if (service.windows[winId].visible && !service.windows[winId].isEmpty()) {
10952 displayedText.push(service.windows[winId].getText());
10953 }
10954 }
10955
10956 service.endPts = pts;
10957 service.text = displayedText.join('\n\n');
10958 this.pushCaption(service);
10959 service.startPts = pts;
10960 };
10961 /**
10962 * Push a caption to output if the caption contains text.
10963 *
10964 * @param {Service} service The service object to be affected
10965 */
10966
10967
10968 Cea708Stream.prototype.pushCaption = function (service) {
10969 if (service.text !== '') {
10970 this.trigger('data', {
10971 startPts: service.startPts,
10972 endPts: service.endPts,
10973 text: service.text,
10974 stream: 'cc708_' + service.serviceNum
10975 });
10976 service.text = '';
10977 service.startPts = service.endPts;
10978 }
10979 };
10980 /**
10981 * Parse and execute the DSW command.
10982 *
10983 * Set visible property of windows based on the parsed bitmask.
10984 *
10985 * @param {Integer} i Current index in the 708 packet
10986 * @param {Service} service The service object to be affected
10987 * @return {Integer} New index after parsing
10988 */
10989
10990
10991 Cea708Stream.prototype.displayWindows = function (i, service) {
10992 var packetData = this.current708Packet.data;
10993 var b = packetData[++i];
10994 var pts = this.getPts(i);
10995 this.flushDisplayed(pts, service);
10996
10997 for (var winId = 0; winId < 8; winId++) {
10998 if (b & 0x01 << winId) {
10999 service.windows[winId].visible = 1;
11000 }
11001 }
11002
11003 return i;
11004 };
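 // Example (illustrative, not executed): a DSW bitmask byte of 0x05
 // (0b00000101) marks windows 0 and 2 visible; HDW, TGW, CLW and DLW below
 // walk the same per-window bitmask to hide, toggle, clear or reset windows.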
11005 /**
11006 * Parse and execute the HDW command.
11007 *
11008 * Set visible property of windows based on the parsed bitmask.
11009 *
11010 * @param {Integer} i Current index in the 708 packet
11011 * @param {Service} service The service object to be affected
11012 * @return {Integer} New index after parsing
11013 */
11014
11015
11016 Cea708Stream.prototype.hideWindows = function (i, service) {
11017 var packetData = this.current708Packet.data;
11018 var b = packetData[++i];
11019 var pts = this.getPts(i);
11020 this.flushDisplayed(pts, service);
11021
11022 for (var winId = 0; winId < 8; winId++) {
11023 if (b & 0x01 << winId) {
11024 service.windows[winId].visible = 0;
11025 }
11026 }
11027
11028 return i;
11029 };
11030 /**
11031 * Parse and execute the TGW command.
11032 *
11033 * Set visible property of windows based on the parsed bitmask.
11034 *
11035 * @param {Integer} i Current index in the 708 packet
11036 * @param {Service} service The service object to be affected
11037 * @return {Integer} New index after parsing
11038 */
11039
11040
11041 Cea708Stream.prototype.toggleWindows = function (i, service) {
11042 var packetData = this.current708Packet.data;
11043 var b = packetData[++i];
11044 var pts = this.getPts(i);
11045 this.flushDisplayed(pts, service);
11046
11047 for (var winId = 0; winId < 8; winId++) {
11048 if (b & 0x01 << winId) {
11049 service.windows[winId].visible ^= 1;
11050 }
11051 }
11052
11053 return i;
11054 };
11055 /**
11056 * Parse and execute the CLW command.
11057 *
11058 * Clear text of windows based on the parsed bitmask.
11059 *
11060 * @param {Integer} i Current index in the 708 packet
11061 * @param {Service} service The service object to be affected
11062 * @return {Integer} New index after parsing
11063 */
11064
11065
11066 Cea708Stream.prototype.clearWindows = function (i, service) {
11067 var packetData = this.current708Packet.data;
11068 var b = packetData[++i];
11069 var pts = this.getPts(i);
11070 this.flushDisplayed(pts, service);
11071
11072 for (var winId = 0; winId < 8; winId++) {
11073 if (b & 0x01 << winId) {
11074 service.windows[winId].clearText();
11075 }
11076 }
11077
11078 return i;
11079 };
11080 /**
11081 * Parse and execute the DLW command.
11082 *
11083 * Re-initialize windows based on the parsed bitmask.
11084 *
11085 * @param {Integer} i Current index in the 708 packet
11086 * @param {Service} service The service object to be affected
11087 * @return {Integer} New index after parsing
11088 */
11089
11090
11091 Cea708Stream.prototype.deleteWindows = function (i, service) {
11092 var packetData = this.current708Packet.data;
11093 var b = packetData[++i];
11094 var pts = this.getPts(i);
11095 this.flushDisplayed(pts, service);
11096
11097 for (var winId = 0; winId < 8; winId++) {
11098 if (b & 0x01 << winId) {
11099 service.windows[winId].reset();
11100 }
11101 }
11102
11103 return i;
11104 };
11105 /**
11106 * Parse and execute the SPA command.
11107 *
11108 * Set pen attributes of the current window.
11109 *
11110 * @param {Integer} i Current index in the 708 packet
11111 * @param {Service} service The service object to be affected
11112 * @return {Integer} New index after parsing
11113 */
11114
11115
11116 Cea708Stream.prototype.setPenAttributes = function (i, service) {
11117 var packetData = this.current708Packet.data;
11118 var b = packetData[i];
11119 var penAttr = service.currentWindow.penAttr;
11120 b = packetData[++i];
11121 penAttr.textTag = (b & 0xf0) >> 4; // tt
11122
11123 penAttr.offset = (b & 0x0c) >> 2; // o
11124
11125 penAttr.penSize = b & 0x03; // s
11126
11127 b = packetData[++i];
11128 penAttr.italics = (b & 0x80) >> 7; // i
11129
11130 penAttr.underline = (b & 0x40) >> 6; // u
11131
11132 penAttr.edgeType = (b & 0x38) >> 3; // et
11133
11134 penAttr.fontStyle = b & 0x07; // fs
11135
11136 return i;
11137 };
11138 /**
11139 * Parse and execute the SPC command.
11140 *
11141 * Set pen color of the current window.
11142 *
11143 * @param {Integer} i Current index in the 708 packet
11144 * @param {Service} service The service object to be affected
11145 * @return {Integer} New index after parsing
11146 */
11147
11148
11149 Cea708Stream.prototype.setPenColor = function (i, service) {
11150 var packetData = this.current708Packet.data;
11151 var b = packetData[i];
11152 var penColor = service.currentWindow.penColor;
11153 b = packetData[++i];
11154 penColor.fgOpacity = (b & 0xc0) >> 6; // fo
11155
11156 penColor.fgRed = (b & 0x30) >> 4; // fr
11157
11158 penColor.fgGreen = (b & 0x0c) >> 2; // fg
11159
11160 penColor.fgBlue = b & 0x03; // fb
11161
11162 b = packetData[++i];
11163 penColor.bgOpacity = (b & 0xc0) >> 6; // bo
11164
11165 penColor.bgRed = (b & 0x30) >> 4; // br
11166
11167 penColor.bgGreen = (b & 0x0c) >> 2; // bg
11168
11169 penColor.bgBlue = b & 0x03; // bb
11170
11171 b = packetData[++i];
11172 penColor.edgeRed = (b & 0x30) >> 4; // er
11173
11174 penColor.edgeGreen = (b & 0x0c) >> 2; // eg
11175
11176 penColor.edgeBlue = b & 0x03; // eb
11177
11178 return i;
11179 };
11180 /**
11181 * Parse and execute the SPL command.
11182 *
11183 * Set pen location of the current window.
11184 *
11185 * @param {Integer} i Current index in the 708 packet
11186 * @param {Service} service The service object to be affected
11187 * @return {Integer} New index after parsing
11188 */
11189
11190
11191 Cea708Stream.prototype.setPenLocation = function (i, service) {
11192 var packetData = this.current708Packet.data;
11193 var b = packetData[i];
11194 var penLoc = service.currentWindow.penLoc; // Positioning isn't really supported at the moment, so this essentially just inserts a linebreak
11195
11196 service.currentWindow.pendingNewLine = true;
11197 b = packetData[++i];
11198 penLoc.row = b & 0x0f; // r
11199
11200 b = packetData[++i];
11201 penLoc.column = b & 0x3f; // c
11202
11203 return i;
11204 };
11205 /**
11206 * Execute the RST command.
11207 *
11208 * Reset service to a clean slate. Re-initialize.
11209 *
11210 * @param {Integer} i Current index in the 708 packet
11211 * @param {Service} service The service object to be affected
11212 * @return {Service} Re-initialized service
11213 */
11214
11215
11216 Cea708Stream.prototype.reset = function (i, service) {
11217 var pts = this.getPts(i);
11218 this.flushDisplayed(pts, service);
11219 return this.initService(service.serviceNum, i);
11220 }; // This hash maps non-ASCII, special, and extended character codes to their
11221 // proper Unicode equivalent. The first keys that are only a single byte
11222 // are the non-standard ASCII characters, which simply map the CEA608 byte
11223 // to the standard ASCII/Unicode. The two-byte keys that follow are the CEA608
11224 // character codes, but have their MSB bitmasked with 0x03 so that a lookup
11225 // can be performed regardless of the field and data channel on which the
11226 // character code was received.
11227
11228
11229 var CHARACTER_TRANSLATION = {
11230 0x2a: 0xe1,
11231 // á
11232 0x5c: 0xe9,
11233 // é
11234 0x5e: 0xed,
11235 // í
11236 0x5f: 0xf3,
11237 // ó
11238 0x60: 0xfa,
11239 // ú
11240 0x7b: 0xe7,
11241 // ç
11242 0x7c: 0xf7,
11243 // ÷
11244 0x7d: 0xd1,
11245 // Ñ
11246 0x7e: 0xf1,
11247 // ñ
11248 0x7f: 0x2588,
11249 // █
11250 0x0130: 0xae,
11251 // ®
11252 0x0131: 0xb0,
11253 // °
11254 0x0132: 0xbd,
11255 // ½
11256 0x0133: 0xbf,
11257 // ¿
11258 0x0134: 0x2122,
11259 // ™
11260 0x0135: 0xa2,
11261 // ¢
11262 0x0136: 0xa3,
11263 // £
11264 0x0137: 0x266a,
11265 // ♪
11266 0x0138: 0xe0,
11267 // à
11268 0x0139: 0xa0,
11269 //   (non-breaking space)
11270 0x013a: 0xe8,
11271 // è
11272 0x013b: 0xe2,
11273 // â
11274 0x013c: 0xea,
11275 // ê
11276 0x013d: 0xee,
11277 // î
11278 0x013e: 0xf4,
11279 // ô
11280 0x013f: 0xfb,
11281 // û
11282 0x0220: 0xc1,
11283 // Á
11284 0x0221: 0xc9,
11285 // É
11286 0x0222: 0xd3,
11287 // Ó
11288 0x0223: 0xda,
11289 // Ú
11290 0x0224: 0xdc,
11291 // Ü
11292 0x0225: 0xfc,
11293 // ü
11294 0x0226: 0x2018,
11295 // ‘
11296 0x0227: 0xa1,
11297 // ¡
11298 0x0228: 0x2a,
11299 // *
11300 0x0229: 0x27,
11301 // '
11302 0x022a: 0x2014,
11303 // —
11304 0x022b: 0xa9,
11305 // ©
11306 0x022c: 0x2120,
11307 // ℠
11308 0x022d: 0x2022,
11309 // •
11310 0x022e: 0x201c,
11311 // “
11312 0x022f: 0x201d,
11313 // ”
11314 0x0230: 0xc0,
11315 // À
11316 0x0231: 0xc2,
11317 // Â
11318 0x0232: 0xc7,
11319 // Ç
11320 0x0233: 0xc8,
11321 // È
11322 0x0234: 0xca,
11323 // Ê
11324 0x0235: 0xcb,
11325 // Ë
11326 0x0236: 0xeb,
11327 // ë
11328 0x0237: 0xce,
11329 // Î
11330 0x0238: 0xcf,
11331 // Ï
11332 0x0239: 0xef,
11333 // ï
11334 0x023a: 0xd4,
11335 // Ô
11336 0x023b: 0xd9,
11337 // Ù
11338 0x023c: 0xf9,
11339 // ù
11340 0x023d: 0xdb,
11341 // Û
11342 0x023e: 0xab,
11343 // «
11344 0x023f: 0xbb,
11345 // »
11346 0x0320: 0xc3,
11347 // Ã
11348 0x0321: 0xe3,
11349 // ã
11350 0x0322: 0xcd,
11351 // Í
11352 0x0323: 0xcc,
11353 // Ì
11354 0x0324: 0xec,
11355 // ì
11356 0x0325: 0xd2,
11357 // Ò
11358 0x0326: 0xf2,
11359 // ò
11360 0x0327: 0xd5,
11361 // Õ
11362 0x0328: 0xf5,
11363 // õ
11364 0x0329: 0x7b,
11365 // {
11366 0x032a: 0x7d,
11367 // }
11368 0x032b: 0x5c,
11369 // \
11370 0x032c: 0x5e,
11371 // ^
11372 0x032d: 0x5f,
11373 // _
11374 0x032e: 0x7c,
11375 // |
11376 0x032f: 0x7e,
11377 // ~
11378 0x0330: 0xc4,
11379 // Ä
11380 0x0331: 0xe4,
11381 // ä
11382 0x0332: 0xd6,
11383 // Ö
11384 0x0333: 0xf6,
11385 // ö
11386 0x0334: 0xdf,
11387 // ß
11388 0x0335: 0xa5,
11389 // ¥
11390 0x0336: 0xa4,
11391 // ¤
11392 0x0337: 0x2502,
11393 // │
11394 0x0338: 0xc5,
11395 // Å
11396 0x0339: 0xe5,
11397 // å
11398 0x033a: 0xd8,
11399 // Ø
11400 0x033b: 0xf8,
11401 // ø
11402 0x033c: 0x250c,
11403 // ┌
11404 0x033d: 0x2510,
11405 // ┐
11406 0x033e: 0x2514,
11407 // └
11408 0x033f: 0x2518 // ┘
11409
11410 };
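 // Example (illustrative, not executed): getCharFromCode below resolves codes
 // through this table. A special-character pair (0x11, 0x37) is masked to
 // (0x11 & 0x03) << 8 | 0x37 === 0x0137, which maps to 0x266a ('♪'), while a
 // plain byte such as 0x41 falls through unchanged as 'A'.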
11411
11412 var getCharFromCode = function getCharFromCode(code) {
11413 if (code === null) {
11414 return '';
11415 }
11416
11417 code = CHARACTER_TRANSLATION[code] || code;
11418 return String.fromCharCode(code);
11419 }; // the index of the last row in a CEA-608 display buffer
11420
11421
11422 var BOTTOM_ROW = 14; // This array is used for mapping PACs -> row #, since there's no way of
11423 // getting it through bit logic.
11424
11425 var ROWS = [0x1100, 0x1120, 0x1200, 0x1220, 0x1500, 0x1520, 0x1600, 0x1620, 0x1700, 0x1720, 0x1000, 0x1300, 0x1320, 0x1400, 0x1420]; // CEA-608 captions are rendered onto a 34x15 matrix of character
11426 // cells. The "bottom" row is the last element in the outer array.
11427
11428 var createDisplayBuffer = function createDisplayBuffer() {
11429 var result = [],
11430 i = BOTTOM_ROW + 1;
11431
11432 while (i--) {
11433 result.push('');
11434 }
11435
11436 return result;
11437 };
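 // Example (illustrative, not executed): a PAC whose masked value
 // (data & 0x1f20) is 0x1500 lands at ROWS.indexOf(0x1500) === 4, i.e. the
 // fifth row (index 4) of the 15-row display buffer created above.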
11438
11439 var Cea608Stream = function Cea608Stream(field, dataChannel) {
11440 Cea608Stream.prototype.init.call(this);
11441 this.field_ = field || 0;
11442 this.dataChannel_ = dataChannel || 0;
11443 this.name_ = 'CC' + ((this.field_ << 1 | this.dataChannel_) + 1);
11444 this.setConstants();
11445 this.reset();
11446
11447 this.push = function (packet) {
11448 var data, swap, char0, char1, text; // remove the parity bits
11449
11450 data = packet.ccData & 0x7f7f; // ignore duplicate control codes; the spec demands they're sent twice
11451
11452 if (data === this.lastControlCode_) {
11453 this.lastControlCode_ = null;
11454 return;
11455 } // Store control codes
11456
11457
11458 if ((data & 0xf000) === 0x1000) {
11459 this.lastControlCode_ = data;
11460 } else if (data !== this.PADDING_) {
11461 this.lastControlCode_ = null;
11462 }
11463
11464 char0 = data >>> 8;
11465 char1 = data & 0xff;
11466
11467 if (data === this.PADDING_) {
11468 return;
11469 } else if (data === this.RESUME_CAPTION_LOADING_) {
11470 this.mode_ = 'popOn';
11471 } else if (data === this.END_OF_CAPTION_) {
11472 // If an EOC is received while in paint-on mode, the displayed caption
11473 // text should be swapped to non-displayed memory as if it was a pop-on
11474 // caption. Because of that, we should explicitly switch back to pop-on
11475 // mode
11476 this.mode_ = 'popOn';
11477 this.clearFormatting(packet.pts); // if a caption was being displayed, it's gone now
11478
11479 this.flushDisplayed(packet.pts); // flip memory
11480
11481 swap = this.displayed_;
11482 this.displayed_ = this.nonDisplayed_;
11483 this.nonDisplayed_ = swap; // start measuring the time to display the caption
11484
11485 this.startPts_ = packet.pts;
11486 } else if (data === this.ROLL_UP_2_ROWS_) {
11487 this.rollUpRows_ = 2;
11488 this.setRollUp(packet.pts);
11489 } else if (data === this.ROLL_UP_3_ROWS_) {
11490 this.rollUpRows_ = 3;
11491 this.setRollUp(packet.pts);
11492 } else if (data === this.ROLL_UP_4_ROWS_) {
11493 this.rollUpRows_ = 4;
11494 this.setRollUp(packet.pts);
11495 } else if (data === this.CARRIAGE_RETURN_) {
11496 this.clearFormatting(packet.pts);
11497 this.flushDisplayed(packet.pts);
11498 this.shiftRowsUp_();
11499 this.startPts_ = packet.pts;
11500 } else if (data === this.BACKSPACE_) {
11501 if (this.mode_ === 'popOn') {
11502 this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
11503 } else {
11504 this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
11505 }
11506 } else if (data === this.ERASE_DISPLAYED_MEMORY_) {
11507 this.flushDisplayed(packet.pts);
11508 this.displayed_ = createDisplayBuffer();
11509 } else if (data === this.ERASE_NON_DISPLAYED_MEMORY_) {
11510 this.nonDisplayed_ = createDisplayBuffer();
11511 } else if (data === this.RESUME_DIRECT_CAPTIONING_) {
11512 if (this.mode_ !== 'paintOn') {
11513 // NOTE: This should be removed when proper caption positioning is
11514 // implemented
11515 this.flushDisplayed(packet.pts);
11516 this.displayed_ = createDisplayBuffer();
11517 }
11518
11519 this.mode_ = 'paintOn';
11520 this.startPts_ = packet.pts; // Append special characters to caption text
11521 } else if (this.isSpecialCharacter(char0, char1)) {
11522 // Bitmask char0 so that we can apply character transformations
11523 // regardless of field and data channel.
11524 // Then byte-shift to the left and OR with char1 so we can pass the
11525 // entire character code to `getCharFromCode`.
11526 char0 = (char0 & 0x03) << 8;
11527 text = getCharFromCode(char0 | char1);
11528 this[this.mode_](packet.pts, text);
11529 this.column_++; // Append extended characters to caption text
11530 } else if (this.isExtCharacter(char0, char1)) {
11531 // Extended characters always follow their "non-extended" equivalents.
11532 // I.e. if a "è" is desired, you'll always receive "eè"; non-compliant
11533 // decoders are supposed to drop the "è", while compliant decoders
11534 // backspace the "e" and insert "è".
11535 // Delete the previous character
11536 if (this.mode_ === 'popOn') {
11537 this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
11538 } else {
11539 this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
11540 } // Bitmask char0 so that we can apply character transformations
11541 // regardless of field and data channel.
11542 // Then byte-shift to the left and OR with char1 so we can pass the
11543 // entire character code to `getCharFromCode`.
11544
11545
11546 char0 = (char0 & 0x03) << 8;
11547 text = getCharFromCode(char0 | char1);
11548 this[this.mode_](packet.pts, text);
11549 this.column_++; // Process mid-row codes
11550 } else if (this.isMidRowCode(char0, char1)) {
11551 // Attributes are not additive, so clear all formatting
11552 this.clearFormatting(packet.pts); // According to the standard, mid-row codes
11553 // should be replaced with spaces, so add one now
11554
11555 this[this.mode_](packet.pts, ' ');
11556 this.column_++;
11557
11558 if ((char1 & 0xe) === 0xe) {
11559 this.addFormatting(packet.pts, ['i']);
11560 }
11561
11562 if ((char1 & 0x1) === 0x1) {
11563 this.addFormatting(packet.pts, ['u']);
11564 } // Detect offset control codes and adjust cursor
11565
11566 } else if (this.isOffsetControlCode(char0, char1)) {
11567 // Cursor position is set by indent PAC (see below) in 4-column
11568 // increments, with an additional offset code of 1-3 to reach any
11569 // of the 32 columns specified by CEA-608. So all we need to do
11570 // here is increment the column cursor by the given offset.
11571 this.column_ += char1 & 0x03; // Detect PACs (Preamble Address Codes)
11572 } else if (this.isPAC(char0, char1)) {
11573 // There's no logic for PAC -> row mapping, so we have to just
11574 // find the row code in an array and use its index :(
11575 var row = ROWS.indexOf(data & 0x1f20); // Configure the caption window if we're in roll-up mode
11576
11577 if (this.mode_ === 'rollUp') {
11578 // This implies that the base row is incorrectly set.
11579 // As per the recommendation in CEA-608 (Base Row Implementation), defer to the number
11580 // of roll-up rows set.
11581 if (row - this.rollUpRows_ + 1 < 0) {
11582 row = this.rollUpRows_ - 1;
11583 }
11584
11585 this.setRollUp(packet.pts, row);
11586 }
11587
11588 if (row !== this.row_) {
11589 // formatting is only persistent for current row
11590 this.clearFormatting(packet.pts);
11591 this.row_ = row;
11592 } // All PACs can apply underline, so detect and apply
11593 // (All odd-numbered second bytes set underline)
11594
11595
11596 if (char1 & 0x1 && this.formatting_.indexOf('u') === -1) {
11597 this.addFormatting(packet.pts, ['u']);
11598 }
11599
11600 if ((data & 0x10) === 0x10) {
11601 // We've got an indent level code. Each successive even number
11602 // increments the column cursor by 4, so we can get the desired
11603 // column position by bit-shifting to the right (to get n/2)
11604 // and multiplying by 4.
11605 this.column_ = ((data & 0xe) >> 1) * 4;
11606 }
11607
11608 if (this.isColorPAC(char1)) {
11609 // it's a color code, though we only support white, which
11610 // can be either normal or italicized. white italics can be
11611 // either 0x4e or 0x6e depending on the row, so we just
11612 // bitwise-and with 0xe to see if italics should be turned on
11613 if ((char1 & 0xe) === 0xe) {
11614 this.addFormatting(packet.pts, ['i']);
11615 }
11616 } // We have a normal character in char0, and possibly one in char1
11617
11618 } else if (this.isNormalChar(char0)) {
11619 if (char1 === 0x00) {
11620 char1 = null;
11621 }
11622
11623 text = getCharFromCode(char0);
11624 text += getCharFromCode(char1);
11625 this[this.mode_](packet.pts, text);
11626 this.column_ += text.length;
11627 } // finish data processing
11628
11629 };
11630 };
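 // Example (illustrative, not executed) of the dispatch above for field 0,
 // data channel 0: pushing {ccData: 0x1425, pts: 0} selects roll-up mode with
 // two rows (ROLL_UP_2_ROWS_); pushing the same word again is ignored, since
 // the spec requires control codes to be transmitted twice.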
11631
11632 Cea608Stream.prototype = new stream(); // Trigger a cue point that captures the current state of the
11633 // display buffer
11634
11635 Cea608Stream.prototype.flushDisplayed = function (pts) {
11636 var content = this.displayed_ // remove spaces from the start and end of the string
11637 .map(function (row) {
11638 try {
11639 return row.trim();
11640 } catch (e) {
11641 // Ordinarily, this shouldn't happen. However, caption
11642 // parsing errors should not throw exceptions and
11643 // break playback.
11644 // eslint-disable-next-line no-console
11645 console.error('Skipping malformed caption.');
11646 return '';
11647 }
11648 }) // combine all text rows to display in one cue
11649 .join('\n') // and remove blank rows from the start and end, but not the middle
11650 .replace(/^\n+|\n+$/g, '');
11651
11652 if (content.length) {
11653 this.trigger('data', {
11654 startPts: this.startPts_,
11655 endPts: pts,
11656 text: content,
11657 stream: this.name_
11658 });
11659 }
11660 };
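 // Example (illustrative, not executed): with displayed_ rows like
 // ['', '  HELLO ', '', 'WORLD'] (remaining rows empty), the cue text becomes
 // 'HELLO\n\nWORLD': rows are trimmed, joined with newlines, and blank rows
 // are stripped from the ends but preserved in the middle.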
11661 /**
11662 * Zero out the data, used for startup and on seek
11663 */
11664
11665
11666 Cea608Stream.prototype.reset = function () {
11667 this.mode_ = 'popOn'; // When in roll-up mode, the index of the last row that will
11668 // actually display captions. If a caption is shifted to a row
11669 // with a lower index than this, it is cleared from the display
11670 // buffer
11671
11672 this.topRow_ = 0;
11673 this.startPts_ = 0;
11674 this.displayed_ = createDisplayBuffer();
11675 this.nonDisplayed_ = createDisplayBuffer();
11676 this.lastControlCode_ = null; // Track row and column for proper line-breaking and spacing
11677
11678 this.column_ = 0;
11679 this.row_ = BOTTOM_ROW;
11680 this.rollUpRows_ = 2; // This variable holds currently-applied formatting
11681
11682 this.formatting_ = [];
11683 };
11684 /**
11685 * Sets up control code and related constants for this instance
11686 */
11687
11688
11689 Cea608Stream.prototype.setConstants = function () {
11690 // The following attributes have these uses:
11691 // ext_ : char0 for mid-row codes, and the base for extended
11692 // chars (ext_+0, ext_+1, and ext_+2 are char0s for
11693 // extended codes)
11694 // control_: char0 for control codes, except byte-shifted to the
11695 // left so that we can do this.control_ | CONTROL_CODE
11696 // offset_: char0 for tab offset codes
11697 //
11698 // It's also worth noting that control codes, and _only_ control codes,
11699 // differ between field 1 and field2. Field 2 control codes are always
11700 // their field 1 value plus 1. That's why there's the "| field" on the
11701 // control value.
11702 if (this.dataChannel_ === 0) {
11703 this.BASE_ = 0x10;
11704 this.EXT_ = 0x11;
11705 this.CONTROL_ = (0x14 | this.field_) << 8;
11706 this.OFFSET_ = 0x17;
11707 } else if (this.dataChannel_ === 1) {
11708 this.BASE_ = 0x18;
11709 this.EXT_ = 0x19;
11710 this.CONTROL_ = (0x1c | this.field_) << 8;
11711 this.OFFSET_ = 0x1f;
11712 } // Constants for the LSByte command codes recognized by Cea608Stream. This
11713 // list is not exhaustive. For a more comprehensive listing and semantics see
11714 // http://www.gpo.gov/fdsys/pkg/CFR-2010-title47-vol1/pdf/CFR-2010-title47-vol1-sec15-119.pdf
11715 // Padding
11716
11717
11718 this.PADDING_ = 0x0000; // Pop-on Mode
11719
11720 this.RESUME_CAPTION_LOADING_ = this.CONTROL_ | 0x20;
11721 this.END_OF_CAPTION_ = this.CONTROL_ | 0x2f; // Roll-up Mode
11722
11723 this.ROLL_UP_2_ROWS_ = this.CONTROL_ | 0x25;
11724 this.ROLL_UP_3_ROWS_ = this.CONTROL_ | 0x26;
11725 this.ROLL_UP_4_ROWS_ = this.CONTROL_ | 0x27;
11726 this.CARRIAGE_RETURN_ = this.CONTROL_ | 0x2d; // paint-on mode
11727
11728 this.RESUME_DIRECT_CAPTIONING_ = this.CONTROL_ | 0x29; // Erasure
11729
11730 this.BACKSPACE_ = this.CONTROL_ | 0x21;
11731 this.ERASE_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2c;
11732 this.ERASE_NON_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2e;
11733 };
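 // Example (illustrative, not executed): for field 0, data channel 0 the
 // assignments above give CONTROL_ = 0x1400, so RESUME_CAPTION_LOADING_ is
 // 0x1420, END_OF_CAPTION_ is 0x142f and CARRIAGE_RETURN_ is 0x142d; on
 // field 1 the OR with field_ makes CONTROL_ 0x1500 instead.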
11734 /**
11735 * Detects if the 2-byte packet data is a special character
11736 *
11737 * Special characters have a second byte in the range 0x30 to 0x3f,
11738 * with the first byte being 0x11 (for data channel 1) or 0x19 (for
11739 * data channel 2).
11740 *
11741 * @param {Integer} char0 The first byte
11742 * @param {Integer} char1 The second byte
11743 * @return {Boolean} Whether the 2 bytes are a special character
11744 */
11745
11746
11747 Cea608Stream.prototype.isSpecialCharacter = function (char0, char1) {
11748 return char0 === this.EXT_ && char1 >= 0x30 && char1 <= 0x3f;
11749 };
11750 /**
11751 * Detects if the 2-byte packet data is an extended character
11752 *
11753 * Extended characters have a second byte in the range 0x20 to 0x3f,
11754 * with the first byte being 0x12 or 0x13 (for data channel 1) or
11755 * 0x1a or 0x1b (for data channel 2).
11756 *
11757 * @param {Integer} char0 The first byte
11758 * @param {Integer} char1 The second byte
11759 * @return {Boolean} Whether the 2 bytes are an extended character
11760 */
11761
11762
11763 Cea608Stream.prototype.isExtCharacter = function (char0, char1) {
11764 return (char0 === this.EXT_ + 1 || char0 === this.EXT_ + 2) && char1 >= 0x20 && char1 <= 0x3f;
11765 };
11766 /**
11767 * Detects if the 2-byte packet is a mid-row code
11768 *
11769 * Mid-row codes have a second byte in the range 0x20 to 0x2f, with
11770 * the first byte being 0x11 (for data channel 1) or 0x19 (for data
11771 * channel 2).
11772 *
11773 * @param {Integer} char0 The first byte
11774 * @param {Integer} char1 The second byte
11775 * @return {Boolean} Whether the 2 bytes are a mid-row code
11776 */
11777
11778
11779 Cea608Stream.prototype.isMidRowCode = function (char0, char1) {
11780 return char0 === this.EXT_ && char1 >= 0x20 && char1 <= 0x2f;
11781 };
11782 /**
11783 * Detects if the 2-byte packet is an offset control code
11784 *
11785 * Offset control codes have a second byte in the range 0x21 to 0x23,
11786 * with the first byte being 0x17 (for data channel 1) or 0x1f (for
11787 * data channel 2).
11788 *
11789 * @param {Integer} char0 The first byte
11790 * @param {Integer} char1 The second byte
11791 * @return {Boolean} Whether the 2 bytes are an offset control code
11792 */
11793
11794
11795 Cea608Stream.prototype.isOffsetControlCode = function (char0, char1) {
11796 return char0 === this.OFFSET_ && char1 >= 0x21 && char1 <= 0x23;
11797 };
11798 /**
11799 * Detects if the 2-byte packet is a Preamble Address Code
11800 *
11801 * PACs have a first byte in the range 0x10 to 0x17 (for data channel 1)
11802 * or 0x18 to 0x1f (for data channel 2), with the second byte in the
11803 * range 0x40 to 0x7f.
11804 *
11805 * @param {Integer} char0 The first byte
11806 * @param {Integer} char1 The second byte
11807 * @return {Boolean} Whether the 2 bytes are a PAC
11808 */
11809
11810
11811 Cea608Stream.prototype.isPAC = function (char0, char1) {
11812 return char0 >= this.BASE_ && char0 < this.BASE_ + 8 && char1 >= 0x40 && char1 <= 0x7f;
11813 };
11814 /**
11815 * Detects if a packet's second byte is in the range of a PAC color code
11816 *
11817 * PAC color codes have a second byte in the range 0x40 to 0x4f, or
11818 * 0x60 to 0x7f (matching the check below).
11819 *
11820 * @param {Integer} char1 The second byte
11821 * @return {Boolean} Whether the byte is a color PAC
11822 */
11823
11824
11825 Cea608Stream.prototype.isColorPAC = function (char1) {
11826 return char1 >= 0x40 && char1 <= 0x4f || char1 >= 0x60 && char1 <= 0x7f;
11827 };
11828 /**
11829 * Detects if a single byte is in the range of a normal character
11830 *
11831 * Normal text bytes are in the range 0x20 to 0x7f.
11832 *
11833 * @param {Integer} char The byte
11834 * @return {Boolean} Whether the byte is a normal character
11835 */
11836
11837
11838 Cea608Stream.prototype.isNormalChar = function (char) {
11839 return char >= 0x20 && char <= 0x7f;
11840 };
11841 /**
11842 * Configures roll-up
11843 *
11844 * @param {Integer} pts Current PTS
11845 * @param {Integer} newBaseRow Used by PACs to slide the current window to
11846 * a new position
11847 */
11848
11849
11850 Cea608Stream.prototype.setRollUp = function (pts, newBaseRow) {
11851 // Reset the base row to the bottom row when switching modes
11852 if (this.mode_ !== 'rollUp') {
11853 this.row_ = BOTTOM_ROW;
11854 this.mode_ = 'rollUp'; // Spec says to wipe memories when switching to roll-up
11855
11856 this.flushDisplayed(pts);
11857 this.nonDisplayed_ = createDisplayBuffer();
11858 this.displayed_ = createDisplayBuffer();
11859 }
11860
11861 if (newBaseRow !== undefined && newBaseRow !== this.row_) {
11862 // move currently displayed captions (up or down) to the new base row
11863 for (var i = 0; i < this.rollUpRows_; i++) {
11864 this.displayed_[newBaseRow - i] = this.displayed_[this.row_ - i];
11865 this.displayed_[this.row_ - i] = '';
11866 }
11867 }
11868
11869 if (newBaseRow === undefined) {
11870 newBaseRow = this.row_;
11871 }
11872
11873 this.topRow_ = newBaseRow - this.rollUpRows_ + 1;
11874 }; // Adds the opening HTML tag for the passed character to the caption text,
11875 // and keeps track of it for later closing
11876
11877
11878 Cea608Stream.prototype.addFormatting = function (pts, format) {
11879 this.formatting_ = this.formatting_.concat(format);
11880 var text = format.reduce(function (text, format) {
11881 return text + '<' + format + '>';
11882 }, '');
11883 this[this.mode_](pts, text);
11884 }; // Adds HTML closing tags for current formatting to caption text and
11885 // clears remembered formatting
11886
11887
11888 Cea608Stream.prototype.clearFormatting = function (pts) {
11889 if (!this.formatting_.length) {
11890 return;
11891 }
11892
11893 var text = this.formatting_.reverse().reduce(function (text, format) {
11894 return text + '</' + format + '>';
11895 }, '');
11896 this.formatting_ = [];
11897 this[this.mode_](pts, text);
11898 }; // Mode Implementations
11899
11900
11901 Cea608Stream.prototype.popOn = function (pts, text) {
11902 var baseRow = this.nonDisplayed_[this.row_]; // buffer characters
11903
11904 baseRow += text;
11905 this.nonDisplayed_[this.row_] = baseRow;
11906 };
11907
11908 Cea608Stream.prototype.rollUp = function (pts, text) {
11909 var baseRow = this.displayed_[this.row_];
11910 baseRow += text;
11911 this.displayed_[this.row_] = baseRow;
11912 };
11913
11914 Cea608Stream.prototype.shiftRowsUp_ = function () {
11915 var i; // clear out inactive rows
11916
11917 for (i = 0; i < this.topRow_; i++) {
11918 this.displayed_[i] = '';
11919 }
11920
11921 for (i = this.row_ + 1; i < BOTTOM_ROW + 1; i++) {
11922 this.displayed_[i] = '';
11923 } // shift displayed rows up
11924
11925
11926 for (i = this.topRow_; i < this.row_; i++) {
11927 this.displayed_[i] = this.displayed_[i + 1];
11928 } // clear out the bottom row
11929
11930
11931 this.displayed_[this.row_] = '';
11932 };
11933
11934 Cea608Stream.prototype.paintOn = function (pts, text) {
11935 var baseRow = this.displayed_[this.row_];
11936 baseRow += text;
11937 this.displayed_[this.row_] = baseRow;
11938 }; // exports
11939
11940
11941 var captionStream = {
11942 CaptionStream: CaptionStream$1,
11943 Cea608Stream: Cea608Stream,
11944 Cea708Stream: Cea708Stream
11945 };
11946 /**
11947 * mux.js
11948 *
11949 * Copyright (c) Brightcove
11950 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
11951 */
11952
11953 var streamTypes = {
11954 H264_STREAM_TYPE: 0x1B,
11955 ADTS_STREAM_TYPE: 0x0F,
11956 METADATA_STREAM_TYPE: 0x15
11957 };
11958 var MAX_TS = 8589934592;
11959 var RO_THRESH = 4294967296;
11960 var TYPE_SHARED = 'shared';
11961
11962 var handleRollover = function handleRollover(value, reference) {
11963 var direction = 1;
11964
11965 if (value > reference) {
11966 // If the current timestamp value is greater than our reference timestamp and we detect a
11967 // timestamp rollover, this means the roll over is happening in the opposite direction.
11968 // Example scenario: Enter a long stream/video just after a rollover occurred. The reference
11969 // point will be set to a small number, e.g. 1. The user then seeks backwards over the
11970 // rollover point. In loading this segment, the timestamp values will be very large,
11971 // e.g. 2^33 - 1. Since this comes before the data we loaded previously, we want to adjust
11972 // the time stamp to be `value - 2^33`.
11973 direction = -1;
11974 } // Note: A seek forwards or back that is greater than the RO_THRESH (2^32, ~13 hours) will
11975 // cause an incorrect adjustment.
11976
11977
11978 while (Math.abs(reference - value) > RO_THRESH) {
11979 value += direction * MAX_TS;
11980 }
11981
11982 return value;
11983 };
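 // Example (illustrative, not executed): with a reference timestamp taken
 // just before the 33-bit rollover, handleRollover(1000, 8589933592) shifts
 // the small post-rollover value up by 2^33 to 8589935592 so it stays
 // monotonic with the reference; seeking back across the rollover point
 // shifts values down by 2^33 instead.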
11984
11985 var TimestampRolloverStream$1 = function TimestampRolloverStream(type) {
11986 var lastDTS, referenceDTS;
11987 TimestampRolloverStream.prototype.init.call(this); // The "shared" type is used in cases where a stream will contain muxed
11988 // video and audio. We could use `undefined` here, but having a string
11989 // makes debugging a little clearer.
11990
11991 this.type_ = type || TYPE_SHARED;
11992
11993 this.push = function (data) {
11994 // Any "shared" rollover streams will accept _all_ data. Otherwise,
11995 // streams will only accept data that matches their type.
11996 if (this.type_ !== TYPE_SHARED && data.type !== this.type_) {
11997 return;
11998 }
11999
12000 if (referenceDTS === undefined) {
12001 referenceDTS = data.dts;
12002 }
12003
12004 data.dts = handleRollover(data.dts, referenceDTS);
12005 data.pts = handleRollover(data.pts, referenceDTS);
12006 lastDTS = data.dts;
12007 this.trigger('data', data);
12008 };
12009
12010 this.flush = function () {
12011 referenceDTS = lastDTS;
12012 this.trigger('done');
12013 };
12014
12015 this.endTimeline = function () {
12016 this.flush();
12017 this.trigger('endedtimeline');
12018 };
12019
12020 this.discontinuity = function () {
12021 referenceDTS = void 0;
12022 lastDTS = void 0;
12023 };
12024
12025 this.reset = function () {
12026 this.discontinuity();
12027 this.trigger('reset');
12028 };
12029 };
12030
12031 TimestampRolloverStream$1.prototype = new stream();
12032 var timestampRolloverStream = {
12033 TimestampRolloverStream: TimestampRolloverStream$1,
12034 handleRollover: handleRollover
12035 };
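 // Usage sketch (illustrative, not executed; assumes the Stream base class's
 // on/push API is available on this prototype):
 //   var rollover = new timestampRolloverStream.TimestampRolloverStream('video');
 //   rollover.on('data', function (data) {
 //     // data.dts / data.pts have been rollover-adjusted
 //   });
 //   rollover.push({type: 'video', dts: 1000, pts: 1000});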
12036
12037 var percentEncode$1 = function percentEncode(bytes, start, end) {
12038 var i,
12039 result = '';
12040
12041 for (i = start; i < end; i++) {
12042 result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
12043 }
12044
12045 return result;
12046 },
12047 // return the string representation of the specified byte range,
12048 // interpreted as UTF-8.
12049 parseUtf8 = function parseUtf8(bytes, start, end) {
12050 return decodeURIComponent(percentEncode$1(bytes, start, end));
12051 },
12052 // return the string representation of the specified byte range,
12053 // interpreted as ISO-8859-1.
12054 parseIso88591$1 = function parseIso88591(bytes, start, end) {
12055 return unescape(percentEncode$1(bytes, start, end)); // jshint ignore:line
12056 },
12057 parseSyncSafeInteger$1 = function parseSyncSafeInteger(data) {
12058 return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
12059 },
12060 tagParsers = {
12061 TXXX: function TXXX(tag) {
12062 var i;
12063
12064 if (tag.data[0] !== 3) {
12065 // ignore frames with unrecognized character encodings
12066 return;
12067 }
12068
12069 for (i = 1; i < tag.data.length; i++) {
12070 if (tag.data[i] === 0) {
12071 // parse the text fields
12072 tag.description = parseUtf8(tag.data, 1, i); // do not include the null terminator in the tag value
12073
12074 tag.value = parseUtf8(tag.data, i + 1, tag.data.length).replace(/\0*$/, '');
12075 break;
12076 }
12077 }
12078
12079 tag.data = tag.value;
12080 },
12081 WXXX: function WXXX(tag) {
12082 var i;
12083
12084 if (tag.data[0] !== 3) {
12085 // ignore frames with unrecognized character encodings
12086 return;
12087 }
12088
12089 for (i = 1; i < tag.data.length; i++) {
12090 if (tag.data[i] === 0) {
12091 // parse the description and URL fields
12092 tag.description = parseUtf8(tag.data, 1, i);
12093 tag.url = parseUtf8(tag.data, i + 1, tag.data.length);
12094 break;
12095 }
12096 }
12097 },
12098 PRIV: function PRIV(tag) {
12099 var i;
12100
12101 for (i = 0; i < tag.data.length; i++) {
12102 if (tag.data[i] === 0) {
12103 // parse the description and URL fields
12104 tag.owner = parseIso88591$1(tag.data, 0, i);
12105 break;
12106 }
12107 }
12108
12109 tag.privateData = tag.data.subarray(i + 1);
12110 tag.data = tag.privateData;
12111 }
12112 },
12113 _MetadataStream;
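 // Example (illustrative, not executed): percentEncode$1([0x48, 0x69], 0, 2)
 // is '%48%69', so parseUtf8 decodes it to 'Hi'; parseSyncSafeInteger$1 drops
 // the MSB of each byte, e.g. [0x00, 0x00, 0x02, 0x01] -> (2 << 7) | 1 = 257,
 // the encoding ID3 uses for sizes.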
12114
12115 _MetadataStream = function MetadataStream(options) {
12116 var settings = {
12117 debug: !!(options && options.debug),
12118 // the bytes of the program-level descriptor field in MP2T
12119 // see ISO/IEC 13818-1:2013 (E), section 2.6 "Program and
12120 // program element descriptors"
12121 descriptor: options && options.descriptor
12122 },
12123 // the total size in bytes of the ID3 tag being parsed
12124 tagSize = 0,
12125 // tag data that is not complete enough to be parsed
12126 buffer = [],
12127 // the total number of bytes currently in the buffer
12128 bufferSize = 0,
12129 i;
12130
12131 _MetadataStream.prototype.init.call(this); // calculate the text track in-band metadata track dispatch type
12132 // https://html.spec.whatwg.org/multipage/embedded-content.html#steps-to-expose-a-media-resource-specific-text-track
12133
12134
12135 this.dispatchType = streamTypes.METADATA_STREAM_TYPE.toString(16);
12136
12137 if (settings.descriptor) {
12138 for (i = 0; i < settings.descriptor.length; i++) {
12139 this.dispatchType += ('00' + settings.descriptor[i].toString(16)).slice(-2);
12140 }
12141 }
12142
12143 this.push = function (chunk) {
12144 var tag, frameStart, frameSize, frame, i, frameHeader;
12145
12146 if (chunk.type !== 'timed-metadata') {
12147 return;
12148 } // if data_alignment_indicator is set in the PES header,
12149 // we must have the start of a new ID3 tag. Assume anything
12150 // remaining in the buffer was malformed and throw it out
12151
12152
12153 if (chunk.dataAlignmentIndicator) {
12154 bufferSize = 0;
12155 buffer.length = 0;
12156 } // ignore events that don't look like ID3 data
12157
12158
12159 if (buffer.length === 0 && (chunk.data.length < 10 || chunk.data[0] !== 'I'.charCodeAt(0) || chunk.data[1] !== 'D'.charCodeAt(0) || chunk.data[2] !== '3'.charCodeAt(0))) {
12160 if (settings.debug) {
12161 // eslint-disable-next-line no-console
12162 console.log('Skipping unrecognized metadata packet');
12163 }
12164
12165 return;
12166 } // add this chunk to the data we've collected so far
12167
12168
12169 buffer.push(chunk);
12170 bufferSize += chunk.data.byteLength; // grab the size of the entire frame from the ID3 header
12171
12172 if (buffer.length === 1) {
12173 // the frame size is transmitted as a 28-bit integer in the
12174 // last four bytes of the ID3 header.
12175 // The most significant bit of each byte is dropped and the
12176 // results concatenated to recover the actual value.
12177 tagSize = parseSyncSafeInteger$1(chunk.data.subarray(6, 10)); // ID3 reports the tag size excluding the header but it's more
12178 // convenient for our comparisons to include it
12179
12180 tagSize += 10;
12181 } // if the entire frame has not arrived, wait for more data
12182
12183
12184 if (bufferSize < tagSize) {
12185 return;
12186 } // collect the entire frame so it can be parsed
12187
12188
12189 tag = {
12190 data: new Uint8Array(tagSize),
12191 frames: [],
12192 pts: buffer[0].pts,
12193 dts: buffer[0].dts
12194 };
12195
12196 for (i = 0; i < tagSize;) {
12197 tag.data.set(buffer[0].data.subarray(0, tagSize - i), i);
12198 i += buffer[0].data.byteLength;
12199 bufferSize -= buffer[0].data.byteLength;
12200 buffer.shift();
12201 } // find the start of the first frame and the end of the tag
12202
12203
12204 frameStart = 10;
12205
12206 if (tag.data[5] & 0x40) {
12207 // advance the frame start past the extended header
12208 frameStart += 4; // header size field
12209
12210 frameStart += parseSyncSafeInteger$1(tag.data.subarray(10, 14)); // clip any padding off the end
12211
12212 tagSize -= parseSyncSafeInteger$1(tag.data.subarray(16, 20));
12213 } // parse one or more ID3 frames
12214 // http://id3.org/id3v2.3.0#ID3v2_frame_overview
12215
12216
12217 do {
12218 // determine the number of bytes in this frame
12219 frameSize = parseSyncSafeInteger$1(tag.data.subarray(frameStart + 4, frameStart + 8));
12220
12221 if (frameSize < 1) {
12222 // eslint-disable-next-line no-console
12223 return console.log('Malformed ID3 frame encountered. Skipping metadata parsing.');
12224 }
12225
12226 frameHeader = String.fromCharCode(tag.data[frameStart], tag.data[frameStart + 1], tag.data[frameStart + 2], tag.data[frameStart + 3]);
12227 frame = {
12228 id: frameHeader,
12229 data: tag.data.subarray(frameStart + 10, frameStart + frameSize + 10)
12230 };
12231 frame.key = frame.id;
12232
12233 if (tagParsers[frame.id]) {
12234 tagParsers[frame.id](frame); // handle the special PRIV frame used to indicate the start
12235 // time for raw AAC data
12236
12237 if (frame.owner === 'com.apple.streaming.transportStreamTimestamp') {
12238 var d = frame.data,
12239 size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
12240 size *= 4;
12241 size += d[7] & 0x03;
12242 frame.timeStamp = size; // in raw AAC, all subsequent data will be timestamped based
12243 // on the value of this frame
12244 // we couldn't have known the appropriate pts and dts before
12245 // parsing this ID3 tag so set those values now
12246
12247 if (tag.pts === undefined && tag.dts === undefined) {
12248 tag.pts = frame.timeStamp;
12249 tag.dts = frame.timeStamp;
12250 }
12251
12252 this.trigger('timestamp', frame);
12253 }
12254 }
12255
12256 tag.frames.push(frame);
12257 frameStart += 10; // advance past the frame header
12258
12259 frameStart += frameSize; // advance past the frame body
12260 } while (frameStart < tagSize);
12261
12262 this.trigger('data', tag);
12263 };
12264 };
12265
12266 _MetadataStream.prototype = new stream();
12267 var metadataStream = _MetadataStream;
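// Minimal usage sketch (illustrative; assumes the constructor tolerates no
// arguments and that `id3Packet` is a hypothetical elementary-stream packet
// of type 'timed-metadata'):
//
//   var md = new _MetadataStream();
//   md.on('data', function (tag) {
//     // tag.frames is the array of parsed ID3 frames built above
//   });
//   md.push(id3Packet);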
12268 var TimestampRolloverStream = timestampRolloverStream.TimestampRolloverStream; // object types
12269
12270 var _TransportPacketStream, _TransportParseStream, _ElementaryStream; // constants
12271
12272
12273 var MP2T_PACKET_LENGTH = 188,
12274 // bytes
12275 SYNC_BYTE = 0x47;
12276 /**
12277 * Splits an incoming stream of binary data into MPEG-2 Transport
12278 * Stream packets.
12279 */
12280
12281 _TransportPacketStream = function TransportPacketStream() {
12282 var buffer = new Uint8Array(MP2T_PACKET_LENGTH),
12283 bytesInBuffer = 0;
12284
12285 _TransportPacketStream.prototype.init.call(this); // Deliver new bytes to the stream.
12286
12287 /**
12288 * Split a stream of data into M2TS packets
12289 **/
12290
12291
12292 this.push = function (bytes) {
12293 var startIndex = 0,
12294 endIndex = MP2T_PACKET_LENGTH,
12295 everything; // If there are bytes remaining from the last segment, prepend them to the
12296 // bytes that were pushed in
12297
12298 if (bytesInBuffer) {
12299 everything = new Uint8Array(bytes.byteLength + bytesInBuffer);
12300 everything.set(buffer.subarray(0, bytesInBuffer));
12301 everything.set(bytes, bytesInBuffer);
12302 bytesInBuffer = 0;
12303 } else {
12304 everything = bytes;
12305 } // While we have enough data for a packet
12306
12307
12308 while (endIndex < everything.byteLength) {
12309 // Look for a pair of start and end sync bytes in the data.
12310 if (everything[startIndex] === SYNC_BYTE && everything[endIndex] === SYNC_BYTE) {
12311 // We found a packet so emit it and jump one whole packet forward in
12312 // the stream
12313 this.trigger('data', everything.subarray(startIndex, endIndex));
12314 startIndex += MP2T_PACKET_LENGTH;
12315 endIndex += MP2T_PACKET_LENGTH;
12316 continue;
12317 } // If we get here, we have somehow become de-synchronized and we need to step
12318 // forward one byte at a time until we find a pair of sync bytes that denote
12319 // a packet
12320
12321
12322 startIndex++;
12323 endIndex++;
12324 } // If there was some data left over at the end of the segment that couldn't
12325 // possibly be a whole packet, keep it because it might be the start of a packet
12326 // that continues in the next segment
12327
12328
12329 if (startIndex < everything.byteLength) {
12330 buffer.set(everything.subarray(startIndex), 0);
12331 bytesInBuffer = everything.byteLength - startIndex;
12332 }
12333 };
12334 /**
12335 * Passes identified M2TS packets to the TransportParseStream to be parsed
12336 **/
12337
12338
12339 this.flush = function () {
12340 // If the buffer contains a whole packet when we are being flushed, emit it
12341 // and empty the buffer. Otherwise hold onto the data because it may be
12342 // important for decoding the next segment
12343 if (bytesInBuffer === MP2T_PACKET_LENGTH && buffer[0] === SYNC_BYTE) {
12344 this.trigger('data', buffer);
12345 bytesInBuffer = 0;
12346 }
12347
12348 this.trigger('done');
12349 };
12350
12351 this.endTimeline = function () {
12352 this.flush();
12353 this.trigger('endedtimeline');
12354 };
12355
12356 this.reset = function () {
12357 bytesInBuffer = 0;
12358 this.trigger('reset');
12359 };
12360 };
12361
12362 _TransportPacketStream.prototype = new stream();
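// Minimal usage sketch (illustrative; `segmentBytes` is a hypothetical
// Uint8Array of raw transport-stream bytes):
//
//   var packetizer = new _TransportPacketStream();
//   packetizer.on('data', function (packet) {
//     // each packet is one 188-byte, sync-byte-aligned Uint8Array
//   });
//   packetizer.push(segmentBytes);
//   packetizer.flush();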
12363 /**
12364 * Accepts an MP2T TransportPacketStream and emits data events with parsed
12365 * forms of the individual transport stream packets.
12366 */
12367
12368 _TransportParseStream = function TransportParseStream() {
12369 var parsePsi, parsePat, parsePmt, self;
12370
12371 _TransportParseStream.prototype.init.call(this);
12372
12373 self = this;
12374 this.packetsWaitingForPmt = [];
12375 this.programMapTable = undefined;
12376
12377 parsePsi = function parsePsi(payload, psi) {
12378 var offset = 0; // PSI packets may be split into multiple sections and those
12379 // sections may be split into multiple packets. If a PSI
12380 // section starts in this packet, the payload_unit_start_indicator
12381 // will be true and the first byte of the payload will indicate
12382 // the offset from the current position to the start of the
12383 // section.
12384
12385 if (psi.payloadUnitStartIndicator) {
12386 offset += payload[offset] + 1;
12387 }
12388
12389 if (psi.type === 'pat') {
12390 parsePat(payload.subarray(offset), psi);
12391 } else {
12392 parsePmt(payload.subarray(offset), psi);
12393 }
12394 };
12395
12396 parsePat = function parsePat(payload, pat) {
12397 pat.section_number = payload[7]; // eslint-disable-line camelcase
12398
12399 pat.last_section_number = payload[8]; // eslint-disable-line camelcase
12400 // skip the PSI header and parse the first PMT entry
12401
12402 self.pmtPid = (payload[10] & 0x1F) << 8 | payload[11];
12403 pat.pmtPid = self.pmtPid;
12404 };
12405 /**
12406 * Parse out the relevant fields of a Program Map Table (PMT).
12407 * @param payload {Uint8Array} the PMT-specific portion of an MP2T
12408 * packet. The first byte in this array should be the table_id
12409 * field.
12410 * @param pmt {object} the object that should be decorated with
12411 * fields parsed from the PMT.
12412 */
12413
12414
12415 parsePmt = function parsePmt(payload, pmt) {
12416 var sectionLength, tableEnd, programInfoLength, offset; // PMTs can be sent ahead of the time when they should actually
12417 // take effect. We don't believe this should ever be the case
12418 // for HLS but we'll ignore "forward" PMT declarations if we see
12419 // them. Future PMT declarations have the current_next_indicator
12420 // set to zero.
12421
12422 if (!(payload[5] & 0x01)) {
12423 return;
12424 } // overwrite any existing program map table
12425
12426
12427 self.programMapTable = {
12428 video: null,
12429 audio: null,
12430 'timed-metadata': {}
12431 }; // the mapping table ends at the end of the current section
12432
12433 sectionLength = (payload[1] & 0x0f) << 8 | payload[2];
12434 tableEnd = 3 + sectionLength - 4; // to determine where the table is, we have to figure out how
12435 // long the program info descriptors are
12436
12437 programInfoLength = (payload[10] & 0x0f) << 8 | payload[11]; // advance the offset to the first entry in the mapping table
12438
12439 offset = 12 + programInfoLength;
12440
12441 while (offset < tableEnd) {
12442 var streamType = payload[offset];
12443 var pid = (payload[offset + 1] & 0x1F) << 8 | payload[offset + 2]; // only map a single elementary_pid for audio and video stream types
12444 // TODO: should this be done for metadata too? for now maintain behavior of
12445 // multiple metadata streams
12446
12447 if (streamType === streamTypes.H264_STREAM_TYPE && self.programMapTable.video === null) {
12448 self.programMapTable.video = pid;
12449 } else if (streamType === streamTypes.ADTS_STREAM_TYPE && self.programMapTable.audio === null) {
12450 self.programMapTable.audio = pid;
12451 } else if (streamType === streamTypes.METADATA_STREAM_TYPE) {
12452 // map pid to stream type for metadata streams
12453 self.programMapTable['timed-metadata'][pid] = streamType;
12454 } // move to the next table entry
12455 // skip past the elementary stream descriptors, if present
12456
12457
12458 offset += ((payload[offset + 3] & 0x0F) << 8 | payload[offset + 4]) + 5;
12459 } // record the map on the packet as well
12460
12461
12462 pmt.programMapTable = self.programMapTable;
12463 };
12464 /**
12465 * Deliver a new MP2T packet to the next stream in the pipeline.
12466 */
12467
12468
12469 this.push = function (packet) {
12470 var result = {},
12471 offset = 4;
12472 result.payloadUnitStartIndicator = !!(packet[1] & 0x40); // pid is a 13-bit field starting at the last bit of packet[1]
12473
12474 result.pid = packet[1] & 0x1f;
12475 result.pid <<= 8;
12476 result.pid |= packet[2]; // if an adaptation field is present, its length is specified by the
12477 // fifth byte of the TS packet header. The adaptation field is
12478 // used to add stuffing to PES packets that don't fill a complete
12479 // TS packet, and to specify some forms of timing and control data
12480 // that we do not currently use.
12481
12482 if ((packet[3] & 0x30) >>> 4 > 0x01) {
12483 offset += packet[offset] + 1;
12484 } // parse the rest of the packet based on the type
12485
12486
12487 if (result.pid === 0) {
12488 result.type = 'pat';
12489 parsePsi(packet.subarray(offset), result);
12490 this.trigger('data', result);
12491 } else if (result.pid === this.pmtPid) {
12492 result.type = 'pmt';
12493 parsePsi(packet.subarray(offset), result);
12494 this.trigger('data', result); // if there are any packets waiting for a PMT to be found, process them now
12495
12496 while (this.packetsWaitingForPmt.length) {
12497 this.processPes_.apply(this, this.packetsWaitingForPmt.shift());
12498 }
12499 } else if (this.programMapTable === undefined) {
12500 // When we have not seen a PMT yet, defer further processing of
12501 // PES packets until one has been parsed
12502 this.packetsWaitingForPmt.push([packet, offset, result]);
12503 } else {
12504 this.processPes_(packet, offset, result);
12505 }
12506 };
12507
12508 this.processPes_ = function (packet, offset, result) {
12509 // set the appropriate stream type
12510 if (result.pid === this.programMapTable.video) {
12511 result.streamType = streamTypes.H264_STREAM_TYPE;
12512 } else if (result.pid === this.programMapTable.audio) {
12513 result.streamType = streamTypes.ADTS_STREAM_TYPE;
12514 } else {
12515 // if not video or audio, it is timed-metadata or unknown
12516 // if unknown, streamType will be undefined
12517 result.streamType = this.programMapTable['timed-metadata'][result.pid];
12518 }
12519
12520 result.type = 'pes';
12521 result.data = packet.subarray(offset);
12522 this.trigger('data', result);
12523 };
12524 };
12525
12526 _TransportParseStream.prototype = new stream();
12527 _TransportParseStream.STREAM_TYPES = {
12528 h264: 0x1b,
12529 adts: 0x0f
12530 };
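// Illustrative shapes of the events emitted here (PID values hypothetical):
// a PAT surfaces as { type: 'pat', pid: 0, pmtPid: 4095, ... }, a PMT as
// { type: 'pmt', programMapTable: { video: 256, audio: 257, 'timed-metadata': {} } },
// and any other known PID as { type: 'pes', streamType: ..., data: <Uint8Array> }.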
12531 /**
12532 * Reconstitutes program elementary stream (PES) packets from parsed
12533 * transport stream packets. That is, if you pipe an
12534 * mp2t.TransportParseStream into a mp2t.ElementaryStream, the output
12535 * events will be events which capture the bytes for individual PES
12536 * packets plus relevant metadata that has been extracted from the
12537 * container.
12538 */
12539
12540 _ElementaryStream = function ElementaryStream() {
12541 var self = this,
12542 // PES packet fragments
12543 video = {
12544 data: [],
12545 size: 0
12546 },
12547 audio = {
12548 data: [],
12549 size: 0
12550 },
12551 timedMetadata = {
12552 data: [],
12553 size: 0
12554 },
12555 programMapTable,
12556 parsePes = function parsePes(payload, pes) {
12557 var ptsDtsFlags;
12558 var startPrefix = payload[0] << 16 | payload[1] << 8 | payload[2]; // default to an empty array
12559
12560 pes.data = new Uint8Array(); // In certain live streams, the start of a TS fragment has TS packets
12561 // containing frame data that continues from the previous fragment. This
12562 // check verifies that the PES data begins a new PES payload
12563
12564 if (startPrefix !== 1) {
12565 return;
12566 } // get the packet length, this will be 0 for video
12567
12568
12569 pes.packetLength = 6 + (payload[4] << 8 | payload[5]); // find out if this packet starts a new keyframe
12570
12571 pes.dataAlignmentIndicator = (payload[6] & 0x04) !== 0; // PES packets may be annotated with a PTS value, or a PTS value
12572 // and a DTS value. Determine what combination of values is
12573 // available to work with.
12574
12575 ptsDtsFlags = payload[7]; // PTS and DTS are normally stored as a 33-bit number. JavaScript
12576 // performs all bitwise operations on 32-bit integers but supports a
12577 // much greater integer range (53 bits) using standard
12578 // mathematical operations.
12579 // We construct a 31-bit value using bitwise operators over the 31
12580 // most significant bits and then multiply by 4 (equal to a left-shift
12581 // of 2) before we add the final 2 least significant bits of the
12582 // timestamp (equal to an OR.)
12583
12584 if (ptsDtsFlags & 0xC0) {
12585 // the PTS and DTS are not written out directly. For information
12586 // on how they are encoded, see
12587 // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
12588 pes.pts = (payload[9] & 0x0E) << 27 | (payload[10] & 0xFF) << 20 | (payload[11] & 0xFE) << 12 | (payload[12] & 0xFF) << 5 | (payload[13] & 0xFE) >>> 3;
12589 pes.pts *= 4; // Left shift by 2
12590
12591 pes.pts += (payload[13] & 0x06) >>> 1; // OR by the two LSBs
12592
12593 pes.dts = pes.pts;
12594
12595 if (ptsDtsFlags & 0x40) {
12596 pes.dts = (payload[14] & 0x0E) << 27 | (payload[15] & 0xFF) << 20 | (payload[16] & 0xFE) << 12 | (payload[17] & 0xFF) << 5 | (payload[18] & 0xFE) >>> 3;
12597 pes.dts *= 4; // Left shift by 2
12598
12599 pes.dts += (payload[18] & 0x06) >>> 1; // OR by the two LSBs
12600 }
12601 } // the data section starts immediately after the PES header.
12602 // pes_header_data_length specifies the number of header bytes
12603 // that follow the last byte of the field.
12604
12605
12606 pes.data = payload.subarray(9 + payload[8]);
12607 },
12608
12609 /**
12610 * Pass completely parsed PES packets to the next stream in the pipeline
12611 **/
12612 flushStream = function flushStream(stream, type, forceFlush) {
12613 var packetData = new Uint8Array(stream.size),
12614 event = {
12615 type: type
12616 },
12617 i = 0,
12618 offset = 0,
12619 packetFlushable = false,
12620 fragment; // do nothing if there is not enough buffered data for a complete
12621 // PES header
12622
12623 if (!stream.data.length || stream.size < 9) {
12624 return;
12625 }
12626
12627 event.trackId = stream.data[0].pid; // reassemble the packet
12628
12629 for (i = 0; i < stream.data.length; i++) {
12630 fragment = stream.data[i];
12631 packetData.set(fragment.data, offset);
12632 offset += fragment.data.byteLength;
12633 } // parse assembled packet's PES header
12634
12635
12636 parsePes(packetData, event); // non-video PES packets MUST have a non-zero PES_packet_length
12637 // check that there is enough stream data to fill the packet
12638
12639 packetFlushable = type === 'video' || event.packetLength <= stream.size; // flush pending packets if the conditions are right
12640
12641 if (forceFlush || packetFlushable) {
12642 stream.size = 0;
12643 stream.data.length = 0;
12644 } // only emit packets that are complete. this is to avoid assembling
12645 // incomplete PES packets due to poor segmentation
12646
12647
12648 if (packetFlushable) {
12649 self.trigger('data', event);
12650 }
12651 };
12652
12653 _ElementaryStream.prototype.init.call(this);
12654 /**
12655 * Identifies M2TS packet types and parses PES packets using metadata
12656 * parsed from the PMT
12657 **/
12658
12659
12660 this.push = function (data) {
12661 ({
12662 pat: function pat() {// we have to wait for the PMT to arrive as well before we
12663 // have any meaningful metadata
12664 },
12665 pes: function pes() {
12666 var stream, streamType;
12667
12668 switch (data.streamType) {
12669 case streamTypes.H264_STREAM_TYPE:
12670 stream = video;
12671 streamType = 'video';
12672 break;
12673
12674 case streamTypes.ADTS_STREAM_TYPE:
12675 stream = audio;
12676 streamType = 'audio';
12677 break;
12678
12679 case streamTypes.METADATA_STREAM_TYPE:
12680 stream = timedMetadata;
12681 streamType = 'timed-metadata';
12682 break;
12683
12684 default:
12685 // ignore unknown stream types
12686 return;
12687 } // if a new packet is starting, we can flush the completed
12688 // packet
12689
12690
12691 if (data.payloadUnitStartIndicator) {
12692 flushStream(stream, streamType, true);
12693 } // buffer this fragment until we are sure we've received the
12694 // complete payload
12695
12696
12697 stream.data.push(data);
12698 stream.size += data.data.byteLength;
12699 },
12700 pmt: function pmt() {
12701 var event = {
12702 type: 'metadata',
12703 tracks: []
12704 };
12705 programMapTable = data.programMapTable; // translate audio and video streams to tracks
12706
12707 if (programMapTable.video !== null) {
12708 event.tracks.push({
12709 timelineStartInfo: {
12710 baseMediaDecodeTime: 0
12711 },
12712 id: +programMapTable.video,
12713 codec: 'avc',
12714 type: 'video'
12715 });
12716 }
12717
12718 if (programMapTable.audio !== null) {
12719 event.tracks.push({
12720 timelineStartInfo: {
12721 baseMediaDecodeTime: 0
12722 },
12723 id: +programMapTable.audio,
12724 codec: 'adts',
12725 type: 'audio'
12726 });
12727 }
12728
12729 self.trigger('data', event);
12730 }
12731 })[data.type]();
12732 };
12733
12734 this.reset = function () {
12735 video.size = 0;
12736 video.data.length = 0;
12737 audio.size = 0;
12738 audio.data.length = 0;
12739 this.trigger('reset');
12740 };
12741 /**
12742 * Flush any remaining input. Video PES packets may be of variable
12743 * length. Normally, the start of a new video packet can trigger the
12744 * finalization of the previous packet. That is not possible if no
12745 * more video is forthcoming, however. In that case, some other
12746 * mechanism (like the end of the file) has to be employed. When it is
12747 * clear that no additional data is forthcoming, calling this method
12748 * will flush the buffered packets.
12749 */
12750
12751
12752 this.flushStreams_ = function () {
12753 // !!THIS ORDER IS IMPORTANT!!
12754 // video first then audio
12755 flushStream(video, 'video');
12756 flushStream(audio, 'audio');
12757 flushStream(timedMetadata, 'timed-metadata');
12758 };
12759
12760 this.flush = function () {
12761 this.flushStreams_();
12762 this.trigger('done');
12763 };
12764 };
12765
12766 _ElementaryStream.prototype = new stream();
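// Putting the three stages together, a minimal pipeline sketch (illustrative;
// `pipe` wires one stream's 'data' events into the next stream's push):
//
//   var packetizer = new _TransportPacketStream();
//   var parser = new _TransportParseStream();
//   var elementary = new _ElementaryStream();
//   packetizer.pipe(parser).pipe(elementary);
//   elementary.on('data', function (event) {
//     // event.type is 'metadata' for track info, or 'video' / 'audio' /
//     // 'timed-metadata' for reassembled PES packets
//   });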
12767 var m2ts = {
12768 PAT_PID: 0x0000,
12769 MP2T_PACKET_LENGTH: MP2T_PACKET_LENGTH,
12770 TransportPacketStream: _TransportPacketStream,
12771 TransportParseStream: _TransportParseStream,
12772 ElementaryStream: _ElementaryStream,
12773 TimestampRolloverStream: TimestampRolloverStream,
12774 CaptionStream: captionStream.CaptionStream,
12775 Cea608Stream: captionStream.Cea608Stream,
12776 Cea708Stream: captionStream.Cea708Stream,
12777 MetadataStream: metadataStream
12778 };
12779
12780 for (var type in streamTypes) {
12781 if (streamTypes.hasOwnProperty(type)) {
12782 m2ts[type] = streamTypes[type];
12783 }
12784 }
12785
12786 var m2ts_1 = m2ts;
12787 var ONE_SECOND_IN_TS$2 = clock.ONE_SECOND_IN_TS;
12788
12789 var _AdtsStream;
12790
12791 var ADTS_SAMPLING_FREQUENCIES$1 = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
12792 /*
12793 * Accepts a ElementaryStream and emits data events with parsed
12794 * AAC Audio Frames of the individual packets. Input audio in ADTS
12795 * format is unpacked and re-emitted as AAC frames.
12796 *
12797 * @see http://wiki.multimedia.cx/index.php?title=ADTS
12798 * @see http://wiki.multimedia.cx/?title=Understanding_AAC
12799 */
12800
12801 _AdtsStream = function AdtsStream(handlePartialSegments) {
12802 var buffer,
12803 frameNum = 0;
12804
12805 _AdtsStream.prototype.init.call(this);
12806
12807 this.push = function (packet) {
12808 var i = 0,
12809 frameLength,
12810 protectionSkipBytes,
12811 frameEnd,
12812 oldBuffer,
12813 sampleCount,
12814 adtsFrameDuration;
12815
12816 if (!handlePartialSegments) {
12817 frameNum = 0;
12818 }
12819
12820 if (packet.type !== 'audio') {
12821 // ignore non-audio data
12822 return;
12823 } // Prepend any data in the buffer to the input data so that we can parse
12824 // AAC frames that cross a PES packet boundary
12825
12826
12827 if (buffer) {
12828 oldBuffer = buffer;
12829 buffer = new Uint8Array(oldBuffer.byteLength + packet.data.byteLength);
12830 buffer.set(oldBuffer);
12831 buffer.set(packet.data, oldBuffer.byteLength);
12832 } else {
12833 buffer = packet.data;
12834 } // unpack any ADTS frames which have been fully received
12835 // for details on the ADTS header, see http://wiki.multimedia.cx/index.php?title=ADTS
12836
12837
12838 while (i + 5 < buffer.length) {
12839 // Look for the start of an ADTS header.
12840 if (buffer[i] !== 0xFF || (buffer[i + 1] & 0xF6) !== 0xF0) {
12841 // If a valid header was not found, jump one forward and attempt to
12842 // find a valid ADTS header starting at the next byte
12843 i++;
12844 continue;
12845 } // The protection skip bit tells us if we have 2 bytes of CRC data at the
12846 // end of the ADTS header
12847
12848
12849 protectionSkipBytes = (~buffer[i + 1] & 0x01) * 2; // Frame length is a 13 bit integer starting 16 bits from the
12850 // end of the sync sequence
12851
12852 frameLength = (buffer[i + 3] & 0x03) << 11 | buffer[i + 4] << 3 | (buffer[i + 5] & 0xe0) >> 5;
12853 sampleCount = ((buffer[i + 6] & 0x03) + 1) * 1024;
12854 adtsFrameDuration = sampleCount * ONE_SECOND_IN_TS$2 / ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2];
12855 frameEnd = i + frameLength; // If we don't have enough data to actually finish this ADTS frame, return
12856 // and wait for more data
12857
12858 if (buffer.byteLength < frameEnd) {
12859 return;
12860 } // Otherwise, deliver the complete AAC frame
12861
12862
12863 this.trigger('data', {
12864 pts: packet.pts + frameNum * adtsFrameDuration,
12865 dts: packet.dts + frameNum * adtsFrameDuration,
12866 sampleCount: sampleCount,
12867 audioobjecttype: (buffer[i + 2] >>> 6 & 0x03) + 1,
12868 channelcount: (buffer[i + 2] & 1) << 2 | (buffer[i + 3] & 0xc0) >>> 6,
12869 samplerate: ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2],
12870 samplingfrequencyindex: (buffer[i + 2] & 0x3c) >>> 2,
12871 // assume ISO/IEC 14496-12 AudioSampleEntry default of 16
12872 samplesize: 16,
12873 data: buffer.subarray(i + 7 + protectionSkipBytes, frameEnd)
12874 });
12875 frameNum++; // If the frame ends exactly at the end of the buffer, clear the buffer and return
12876
12877 if (buffer.byteLength === frameEnd) {
12878 buffer = undefined;
12879 return;
12880 } // Remove the finished frame from the buffer and start the process again
12881
12882
12883 buffer = buffer.subarray(frameEnd);
12884 }
12885 };
12886
12887 this.flush = function () {
12888 frameNum = 0;
12889 this.trigger('done');
12890 };
12891
12892 this.reset = function () {
12893 buffer = void 0;
12894 this.trigger('reset');
12895 };
12896
12897 this.endTimeline = function () {
12898 buffer = void 0;
12899 this.trigger('endedtimeline');
12900 };
12901 };
12902
12903 _AdtsStream.prototype = new stream();
12904 var adts = _AdtsStream;
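// Worked example (illustrative): for the header bytes
// 0xFF 0xF1 0x50 0x40 0x1F 0xFC 0x00, protection_absent is set so
// protectionSkipBytes is 0, the sampling frequency index (0x50 & 0x3c) >>> 2
// is 4 (44100 Hz), sampleCount is ((0x00 & 0x03) + 1) * 1024 === 1024, and
// the 13-bit frame length is
// (0x40 & 0x03) << 11 | 0x1F << 3 | (0xFC & 0xe0) >> 5 === 255 bytes.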
12905 /**
12906 * mux.js
12907 *
12908 * Copyright (c) Brightcove
12909 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
12910 */
12911
12912 var ExpGolomb;
12913 /**
12914 * Parser for exponential Golomb codes, a variable-bitwidth number encoding
12915 * scheme used by h264.
12916 */
12917
12918 ExpGolomb = function ExpGolomb(workingData) {
12919 var // the number of bytes left to examine in workingData
12920 workingBytesAvailable = workingData.byteLength,
12921 // the current word being examined
12922 workingWord = 0,
12923 // :uint
12924 // the number of bits left to examine in the current word
12925 workingBitsAvailable = 0; // :uint;
12926 // ():uint
12927
12928 this.length = function () {
12929 return 8 * workingBytesAvailable;
12930 }; // ():uint
12931
12932
12933 this.bitsAvailable = function () {
12934 return 8 * workingBytesAvailable + workingBitsAvailable;
12935 }; // ():void
12936
12937
12938 this.loadWord = function () {
12939 var position = workingData.byteLength - workingBytesAvailable,
12940 workingBytes = new Uint8Array(4),
12941 availableBytes = Math.min(4, workingBytesAvailable);
12942
12943 if (availableBytes === 0) {
12944 throw new Error('no bytes available');
12945 }
12946
12947 workingBytes.set(workingData.subarray(position, position + availableBytes));
12948 workingWord = new DataView(workingBytes.buffer).getUint32(0); // track the amount of workingData that has been processed
12949
12950 workingBitsAvailable = availableBytes * 8;
12951 workingBytesAvailable -= availableBytes;
12952 }; // (count:int):void
12953
12954
12955 this.skipBits = function (count) {
12956 var skipBytes; // :int
12957
12958 if (workingBitsAvailable > count) {
12959 workingWord <<= count;
12960 workingBitsAvailable -= count;
12961 } else {
12962 count -= workingBitsAvailable;
12963 skipBytes = Math.floor(count / 8);
12964 count -= skipBytes * 8;
12965 workingBytesAvailable -= skipBytes;
12966 this.loadWord();
12967 workingWord <<= count;
12968 workingBitsAvailable -= count;
12969 }
12970 }; // (size:int):uint
12971
12972
12973 this.readBits = function (size) {
12974 var bits = Math.min(workingBitsAvailable, size),
12975 // :uint
12976 valu = workingWord >>> 32 - bits; // :uint
12977 // if size > 31, handle error
12978
12979 workingBitsAvailable -= bits;
12980
12981 if (workingBitsAvailable > 0) {
12982 workingWord <<= bits;
12983 } else if (workingBytesAvailable > 0) {
12984 this.loadWord();
12985 }
12986
12987 bits = size - bits;
12988
12989 if (bits > 0) {
12990 return valu << bits | this.readBits(bits);
12991 }
12992
12993 return valu;
12994 }; // ():uint
12995
12996
12997 this.skipLeadingZeros = function () {
12998 var leadingZeroCount; // :uint
12999
13000 for (leadingZeroCount = 0; leadingZeroCount < workingBitsAvailable; ++leadingZeroCount) {
13001 if ((workingWord & 0x80000000 >>> leadingZeroCount) !== 0) {
13002 // the first bit of working word is 1
13003 workingWord <<= leadingZeroCount;
13004 workingBitsAvailable -= leadingZeroCount;
13005 return leadingZeroCount;
13006 }
13007 } // we exhausted workingWord and still have not found a 1
13008
13009
13010 this.loadWord();
13011 return leadingZeroCount + this.skipLeadingZeros();
13012 }; // ():void
13013
13014
13015 this.skipUnsignedExpGolomb = function () {
13016 this.skipBits(1 + this.skipLeadingZeros());
13017 }; // ():void
13018
13019
13020 this.skipExpGolomb = function () {
13021 this.skipBits(1 + this.skipLeadingZeros());
13022 }; // ():uint
13023
13024
13025 this.readUnsignedExpGolomb = function () {
13026 var clz = this.skipLeadingZeros(); // :uint
13027
13028 return this.readBits(clz + 1) - 1;
13029 }; // ():int
13030
13031
13032 this.readExpGolomb = function () {
13033 var valu = this.readUnsignedExpGolomb(); // :int
13034
13035 if (0x01 & valu) {
13036 // the number is odd if the low order bit is set
13037 return 1 + valu >>> 1; // add 1 to make it even, and divide by 2
13038 }
13039
13040 return -1 * (valu >>> 1); // divide by two then make it negative
13041 }; // Some convenience functions
13042 // :Boolean
13043
13044
13045 this.readBoolean = function () {
13046 return this.readBits(1) === 1;
13047 }; // ():int
13048
13049
13050 this.readUnsignedByte = function () {
13051 return this.readBits(8);
13052 };
13053
13054 this.loadWord();
13055 };
13056
13057 var expGolomb = ExpGolomb;
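// Worked example (illustrative): Exp-Golomb codes a non-negative integer as
// leading zeros, a one, and then as many payload bits as there were zeros.
// The bit string 00101 has two leading zeros, readBits(3) then consumes
// '101' (5), and 5 - 1 === 4, so:
//
//   new ExpGolomb(new Uint8Array([0x28])).readUnsignedExpGolomb(); // 4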
13058
13059 var _H264Stream, _NalByteStream;
13060
13061 var PROFILES_WITH_OPTIONAL_SPS_DATA;
13062 /**
13063 * Accepts a NAL unit byte stream and unpacks the embedded NAL units.
13064 */
13065
13066 _NalByteStream = function NalByteStream() {
13067 var syncPoint = 0,
13068 i,
13069 buffer;
13070
13071 _NalByteStream.prototype.init.call(this);
13072 /*
13073 * Scans a byte stream and triggers a data event with the NAL units found.
13074 * @param {Object} data Event received from H264Stream
13075 * @param {Uint8Array} data.data The h264 byte stream to be scanned
13076 *
13077 * @see H264Stream.push
13078 */
13079
13080
13081 this.push = function (data) {
13082 var swapBuffer;
13083
13084 if (!buffer) {
13085 buffer = data.data;
13086 } else {
13087 swapBuffer = new Uint8Array(buffer.byteLength + data.data.byteLength);
13088 swapBuffer.set(buffer);
13089 swapBuffer.set(data.data, buffer.byteLength);
13090 buffer = swapBuffer;
13091 }
13092
13093 var len = buffer.byteLength; // Rec. ITU-T H.264, Annex B
13094 // scan for NAL unit boundaries
13095 // a match looks like this:
13096 // 0 0 1 .. NAL .. 0 0 1
13097 // ^ sync point ^ i
13098 // or this:
13099 // 0 0 1 .. NAL .. 0 0 0
13100 // ^ sync point ^ i
13101 // advance the sync point to a NAL start, if necessary
13102
13103 for (; syncPoint < len - 3; syncPoint++) {
13104 if (buffer[syncPoint + 2] === 1) {
13105 // the sync point is properly aligned
13106 i = syncPoint + 5;
13107 break;
13108 }
13109 }
13110
13111 while (i < len) {
13112 // look at the current byte to determine if we've hit the end of
13113 // a NAL unit boundary
13114 switch (buffer[i]) {
13115 case 0:
13116 // skip past non-sync sequences
13117 if (buffer[i - 1] !== 0) {
13118 i += 2;
13119 break;
13120 } else if (buffer[i - 2] !== 0) {
13121 i++;
13122 break;
13123 } // deliver the NAL unit if it isn't empty
13124
13125
13126 if (syncPoint + 3 !== i - 2) {
13127 this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
13128 } // drop trailing zeroes
13129
13130
13131 do {
13132 i++;
13133 } while (buffer[i] !== 1 && i < len);
13134
13135 syncPoint = i - 2;
13136 i += 3;
13137 break;
13138
13139 case 1:
13140 // skip past non-sync sequences
13141 if (buffer[i - 1] !== 0 || buffer[i - 2] !== 0) {
13142 i += 3;
13143 break;
13144 } // deliver the NAL unit
13145
13146
13147 this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
13148 syncPoint = i - 2;
13149 i += 3;
13150 break;
13151
13152 default:
13153 // the current byte isn't a one or zero, so it cannot be part
13154 // of a sync sequence
13155 i += 3;
13156 break;
13157 }
13158 } // filter out the NAL units that were delivered
13159
13160
13161 buffer = buffer.subarray(syncPoint);
13162 i -= syncPoint;
13163 syncPoint = 0;
13164 };
13165
13166 this.reset = function () {
13167 buffer = null;
13168 syncPoint = 0;
13169 this.trigger('reset');
13170 };
13171
13172 this.flush = function () {
13173 // deliver the last buffered NAL unit
13174 if (buffer && buffer.byteLength > 3) {
13175 this.trigger('data', buffer.subarray(syncPoint + 3));
13176 } // reset the stream state
13177
13178
13179 buffer = null;
13180 syncPoint = 0;
13181 this.trigger('done');
13182 };
13183
13184 this.endTimeline = function () {
13185 this.flush();
13186 this.trigger('endedtimeline');
13187 };
13188 };
13189
13190 _NalByteStream.prototype = new stream(); // values of profile_idc that indicate additional fields are included in the SPS
13191 // see Recommendation ITU-T H.264 (4/2013),
13192 // 7.3.2.1.1 Sequence parameter set data syntax
13193
13194 PROFILES_WITH_OPTIONAL_SPS_DATA = {
13195 100: true,
13196 110: true,
13197 122: true,
13198 244: true,
13199 44: true,
13200 83: true,
13201 86: true,
13202 118: true,
13203 128: true,
13204 138: true,
13205 139: true,
13206 134: true
13207 };
13208 /**
13209 * Accepts input from a ElementaryStream and produces H.264 NAL unit data
13210 * events.
13211 */
13212
13213 _H264Stream = function H264Stream() {
13214 var nalByteStream = new _NalByteStream(),
13215 self,
13216 trackId,
13217 currentPts,
13218 currentDts,
13219 discardEmulationPreventionBytes,
13220 readSequenceParameterSet,
13221 skipScalingList;
13222
13223 _H264Stream.prototype.init.call(this);
13224
13225 self = this;
13226 /*
13227 * Pushes a packet from a stream onto the NalByteStream
13228 *
13229 * @param {Object} packet - A packet received from a stream
13230 * @param {Uint8Array} packet.data - The raw bytes of the packet
13231 * @param {Number} packet.dts - Decode timestamp of the packet
13232 * @param {Number} packet.pts - Presentation timestamp of the packet
13233 * @param {Number} packet.trackId - The id of the h264 track this packet came from
13234 * @param {('video'|'audio')} packet.type - The type of packet
13235 *
13236 */
13237
13238 this.push = function (packet) {
13239 if (packet.type !== 'video') {
13240 return;
13241 }
13242
13243 trackId = packet.trackId;
13244 currentPts = packet.pts;
13245 currentDts = packet.dts;
13246 nalByteStream.push(packet);
13247 };
13248 /*
13249 * Identify NAL unit types and pass on the NALU, trackId, presentation and decode timestamps
13250 * for the NALUs to the next stream component.
13251 * Also, preprocess caption and sequence parameter NALUs.
13252 *
13253 * @param {Uint8Array} data - A NAL unit identified by `NalByteStream.push`
13254 * @see NalByteStream.push
13255 */
13256
13257
13258 nalByteStream.on('data', function (data) {
13259 var event = {
13260 trackId: trackId,
13261 pts: currentPts,
13262 dts: currentDts,
13263 data: data
13264 };
13265
13266 switch (data[0] & 0x1f) {
13267 case 0x05:
13268 event.nalUnitType = 'slice_layer_without_partitioning_rbsp_idr';
13269 break;
13270
13271 case 0x06:
13272 event.nalUnitType = 'sei_rbsp';
13273 event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
13274 break;
13275
13276 case 0x07:
13277 event.nalUnitType = 'seq_parameter_set_rbsp';
13278 event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
13279 event.config = readSequenceParameterSet(event.escapedRBSP);
13280 break;
13281
13282 case 0x08:
13283 event.nalUnitType = 'pic_parameter_set_rbsp';
13284 break;
13285
13286 case 0x09:
13287 event.nalUnitType = 'access_unit_delimiter_rbsp';
13288 break;
13289 } // This triggers data on the H264Stream
13290
13291
13292 self.trigger('data', event);
13293 });
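// Illustrative note: the NAL unit type is the low five bits of the first
// byte, so a NAL unit beginning 0x67 (0x67 & 0x1f === 0x07) is classified
// above as 'seq_parameter_set_rbsp', and one beginning 0x65 (type 0x05) as
// an IDR slice.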
13294 nalByteStream.on('done', function () {
13295 self.trigger('done');
13296 });
13297 nalByteStream.on('partialdone', function () {
13298 self.trigger('partialdone');
13299 });
13300 nalByteStream.on('reset', function () {
13301 self.trigger('reset');
13302 });
13303 nalByteStream.on('endedtimeline', function () {
13304 self.trigger('endedtimeline');
13305 });
13306
13307 this.flush = function () {
13308 nalByteStream.flush();
13309 };
13310
13311 this.partialFlush = function () {
13312 nalByteStream.partialFlush();
13313 };
13314
13315 this.reset = function () {
13316 nalByteStream.reset();
13317 };
13318
13319 this.endTimeline = function () {
13320 nalByteStream.endTimeline();
13321 };
13322 /**
13323 * Advance the ExpGolomb decoder past a scaling list. The scaling
13324 * list is optionally transmitted as part of a sequence parameter
13325 * set and is not relevant to transmuxing.
13326 * @param count {number} the number of entries in this scaling list
13327 * @param expGolombDecoder {object} an ExpGolomb pointed to the
13328 * start of a scaling list
13329 * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
13330 */
13331
13332
13333 skipScalingList = function skipScalingList(count, expGolombDecoder) {
13334 var lastScale = 8,
13335 nextScale = 8,
13336 j,
13337 deltaScale;
13338
13339 for (j = 0; j < count; j++) {
13340 if (nextScale !== 0) {
13341 deltaScale = expGolombDecoder.readExpGolomb();
13342 nextScale = (lastScale + deltaScale + 256) % 256;
13343 }
13344
13345 lastScale = nextScale === 0 ? lastScale : nextScale;
13346 }
13347 };
13348 /**
13349 * Expunge any "Emulation Prevention" bytes from a "Raw Byte
13350 * Sequence Payload"
13351 * @param data {Uint8Array} the bytes of a RBSP from a NAL
13352 * unit
13353 * @return {Uint8Array} the RBSP without any Emulation
13354 * Prevention Bytes
13355 */
13356
13357
13358 discardEmulationPreventionBytes = function discardEmulationPreventionBytes(data) {
13359 var length = data.byteLength,
13360 emulationPreventionBytesPositions = [],
13361 i = 1,
13362 newLength,
13363 newData; // Find all `Emulation Prevention Bytes`
13364
13365 while (i < length - 2) {
13366 if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
13367 emulationPreventionBytesPositions.push(i + 2);
13368 i += 2;
13369 } else {
13370 i++;
13371 }
13372 } // If no Emulation Prevention Bytes were found just return the original
13373 // array
13374
13375
13376 if (emulationPreventionBytesPositions.length === 0) {
13377 return data;
13378 } // Create a new array to hold the NAL unit data
13379
13380
13381 newLength = length - emulationPreventionBytesPositions.length;
13382 newData = new Uint8Array(newLength);
13383 var sourceIndex = 0;
13384
13385 for (i = 0; i < newLength; sourceIndex++, i++) {
13386 if (sourceIndex === emulationPreventionBytesPositions[0]) {
13387 // Skip this byte
13388 sourceIndex++; // Remove this position index
13389
13390 emulationPreventionBytesPositions.shift();
13391 }
13392
13393 newData[i] = data[sourceIndex];
13394 }
13395
13396 return newData;
13397 };
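// Illustrative example: H.264 inserts 0x03 after any two consecutive zero
// bytes inside a NAL unit so payload bytes can never form a start code.
// Given [0x01, 0x00, 0x00, 0x03, 0x02], the function above records the
// 0x03 at index 3 and returns [0x01, 0x00, 0x00, 0x02].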
13398 /**
13399 * Read a sequence parameter set and return some interesting video
13400 * properties. A sequence parameter set is the H264 metadata that
13401 * describes the properties of upcoming video frames.
13402 * @param data {Uint8Array} the bytes of a sequence parameter set
13403 * @return {object} an object with configuration parsed from the
13404 * sequence parameter set, including the dimensions of the
13405 * associated video frames.
13406 */
13407
13408
13409 readSequenceParameterSet = function readSequenceParameterSet(data) {
13410 var frameCropLeftOffset = 0,
13411 frameCropRightOffset = 0,
13412 frameCropTopOffset = 0,
13413 frameCropBottomOffset = 0,
13414 sarScale = 1,
13415 expGolombDecoder,
13416 profileIdc,
13417 levelIdc,
13418 profileCompatibility,
13419 chromaFormatIdc,
13420 picOrderCntType,
13421 numRefFramesInPicOrderCntCycle,
13422 picWidthInMbsMinus1,
13423 picHeightInMapUnitsMinus1,
13424 frameMbsOnlyFlag,
13425 scalingListCount,
13426 sarRatio,
13427 aspectRatioIdc,
13428 i;
13429 expGolombDecoder = new expGolomb(data);
13430 profileIdc = expGolombDecoder.readUnsignedByte(); // profile_idc
13431
13432 profileCompatibility = expGolombDecoder.readUnsignedByte(); // constraint_set[0-5]_flag
13433
13434 levelIdc = expGolombDecoder.readUnsignedByte(); // level_idc u(8)
13435
13436 expGolombDecoder.skipUnsignedExpGolomb(); // seq_parameter_set_id
13437 // some profiles have more optional data we don't need
13438
13439 if (PROFILES_WITH_OPTIONAL_SPS_DATA[profileIdc]) {
13440 chromaFormatIdc = expGolombDecoder.readUnsignedExpGolomb();
13441
13442 if (chromaFormatIdc === 3) {
13443 expGolombDecoder.skipBits(1); // separate_colour_plane_flag
13444 }
13445
13446 expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_luma_minus8
13447
13448 expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_chroma_minus8
13449
13450 expGolombDecoder.skipBits(1); // qpprime_y_zero_transform_bypass_flag
13451
13452 if (expGolombDecoder.readBoolean()) {
13453 // seq_scaling_matrix_present_flag
13454 scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
13455
13456 for (i = 0; i < scalingListCount; i++) {
13457 if (expGolombDecoder.readBoolean()) {
13458 // seq_scaling_list_present_flag[ i ]
13459 if (i < 6) {
13460 skipScalingList(16, expGolombDecoder);
13461 } else {
13462 skipScalingList(64, expGolombDecoder);
13463 }
13464 }
13465 }
13466 }
13467 }
13468
13469 expGolombDecoder.skipUnsignedExpGolomb(); // log2_max_frame_num_minus4
13470
13471 picOrderCntType = expGolombDecoder.readUnsignedExpGolomb();
13472
13473 if (picOrderCntType === 0) {
13474 expGolombDecoder.readUnsignedExpGolomb(); // log2_max_pic_order_cnt_lsb_minus4
13475 } else if (picOrderCntType === 1) {
13476 expGolombDecoder.skipBits(1); // delta_pic_order_always_zero_flag
13477
13478 expGolombDecoder.skipExpGolomb(); // offset_for_non_ref_pic
13479
13480 expGolombDecoder.skipExpGolomb(); // offset_for_top_to_bottom_field
13481
13482 numRefFramesInPicOrderCntCycle = expGolombDecoder.readUnsignedExpGolomb();
13483
13484 for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
13485 expGolombDecoder.skipExpGolomb(); // offset_for_ref_frame[ i ]
13486 }
13487 }
13488
13489 expGolombDecoder.skipUnsignedExpGolomb(); // max_num_ref_frames
13490
13491 expGolombDecoder.skipBits(1); // gaps_in_frame_num_value_allowed_flag
13492
13493 picWidthInMbsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
13494 picHeightInMapUnitsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
13495 frameMbsOnlyFlag = expGolombDecoder.readBits(1);
13496
13497 if (frameMbsOnlyFlag === 0) {
13498 expGolombDecoder.skipBits(1); // mb_adaptive_frame_field_flag
13499 }
13500
13501 expGolombDecoder.skipBits(1); // direct_8x8_inference_flag
13502
13503 if (expGolombDecoder.readBoolean()) {
13504 // frame_cropping_flag
13505 frameCropLeftOffset = expGolombDecoder.readUnsignedExpGolomb();
13506 frameCropRightOffset = expGolombDecoder.readUnsignedExpGolomb();
13507 frameCropTopOffset = expGolombDecoder.readUnsignedExpGolomb();
13508 frameCropBottomOffset = expGolombDecoder.readUnsignedExpGolomb();
13509 }
13510
13511 if (expGolombDecoder.readBoolean()) {
13512 // vui_parameters_present_flag
13513 if (expGolombDecoder.readBoolean()) {
13514 // aspect_ratio_info_present_flag
13515 aspectRatioIdc = expGolombDecoder.readUnsignedByte();
13516
13517 switch (aspectRatioIdc) {
13518 case 1:
13519 sarRatio = [1, 1];
13520 break;
13521
13522 case 2:
13523 sarRatio = [12, 11];
13524 break;
13525
13526 case 3:
13527 sarRatio = [10, 11];
13528 break;
13529
13530 case 4:
13531 sarRatio = [16, 11];
13532 break;
13533
13534 case 5:
13535 sarRatio = [40, 33];
13536 break;
13537
13538 case 6:
13539 sarRatio = [24, 11];
13540 break;
13541
13542 case 7:
13543 sarRatio = [20, 11];
13544 break;
13545
13546 case 8:
13547 sarRatio = [32, 11];
13548 break;
13549
13550 case 9:
13551 sarRatio = [80, 33];
13552 break;
13553
13554 case 10:
13555 sarRatio = [18, 11];
13556 break;
13557
13558 case 11:
13559 sarRatio = [15, 11];
13560 break;
13561
13562 case 12:
13563 sarRatio = [64, 33];
13564 break;
13565
13566 case 13:
13567 sarRatio = [160, 99];
13568 break;
13569
13570 case 14:
13571 sarRatio = [4, 3];
13572 break;
13573
13574 case 15:
13575 sarRatio = [3, 2];
13576 break;
13577
13578 case 16:
13579 sarRatio = [2, 1];
13580 break;
13581
13582 case 255:
13583 {
13584 sarRatio = [expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte(), expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte()];
13585 break;
13586 }
13587 }
13588
13589 if (sarRatio) {
13590 sarScale = sarRatio[0] / sarRatio[1];
13591 }
13592 }
13593 }
13594
13595 return {
13596 profileIdc: profileIdc,
13597 levelIdc: levelIdc,
13598 profileCompatibility: profileCompatibility,
13599 width: Math.ceil(((picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2) * sarScale),
13600 height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - frameCropTopOffset * 2 - frameCropBottomOffset * 2,
13601 sarRatio: sarRatio
13602 };
13603 };
13604 };
13605
13606 _H264Stream.prototype = new stream();
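// Worked example (illustrative): for 720p video with square pixels and no
// cropping, pic_width_in_mbs_minus1 is 79 and pic_height_in_map_units_minus1
// is 44 with frame_mbs_only_flag set, giving width = 80 * 16 === 1280 and
// height = (2 - 1) * 45 * 16 === 720.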
13607 var h264 = {
13608 H264Stream: _H264Stream,
13609 NalByteStream: _NalByteStream
13610 };
13611 /**
13612 * mux.js
13613 *
13614 * Copyright (c) Brightcove
13615 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
13616 *
13617 * Utilities to detect basic properties and metadata about AAC data.
13618 */
13619
13620 var ADTS_SAMPLING_FREQUENCIES = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
13621
13622 var parseId3TagSize = function parseId3TagSize(header, byteIndex) {
13623 var returnSize = header[byteIndex + 6] << 21 | header[byteIndex + 7] << 14 | header[byteIndex + 8] << 7 | header[byteIndex + 9],
13624 flags = header[byteIndex + 5],
13625 footerPresent = (flags & 16) >> 4; // if we get a negative returnSize clamp it to 0
13626
13627 returnSize = returnSize >= 0 ? returnSize : 0;
13628
13629 if (footerPresent) {
13630 return returnSize + 20;
13631 }
13632
13633 return returnSize + 10;
13634 };
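// Worked example (illustrative): with the syncsafe size bytes
// [0x00, 0x00, 0x02, 0x01] (257) at offsets 6-9 and the footer flag set in
// byte 5, parseId3TagSize returns 257 + 20 === 277, accounting for both the
// 10-byte header and the 10-byte footer.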
13635
13636 var getId3Offset = function getId3Offset(data, offset) {
13637 if (data.length - offset < 10 || data[offset] !== 'I'.charCodeAt(0) || data[offset + 1] !== 'D'.charCodeAt(0) || data[offset + 2] !== '3'.charCodeAt(0)) {
13638 return offset;
13639 }
13640
13641 offset += parseId3TagSize(data, offset);
13642 return getId3Offset(data, offset);
13643 }; // TODO: use vhs-utils
13644
13645
13646 var isLikelyAacData$2 = function isLikelyAacData(data) {
13647 var offset = getId3Offset(data, 0);
13648 return data.length >= offset + 2 && (data[offset] & 0xFF) === 0xFF && (data[offset + 1] & 0xF0) === 0xF0 && // verify that the 2 layer bits are 0, aka this
13649 // is not mp3 data but aac data.
13650 (data[offset + 1] & 0x16) === 0x10;
13651 };
13652
13653 var parseSyncSafeInteger = function parseSyncSafeInteger(data) {
13654 return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
13655 }; // return a percent-encoded representation of the specified byte range
13656 // @see http://en.wikipedia.org/wiki/Percent-encoding
13657
13658
13659 var percentEncode = function percentEncode(bytes, start, end) {
13660 var i,
13661 result = '';
13662
13663 for (i = start; i < end; i++) {
13664 result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
13665 }
13666
13667 return result;
13668 }; // return the string representation of the specified byte range,
13669 // interpreted as ISO-8859-1.
13670
13671
13672 var parseIso88591 = function parseIso88591(bytes, start, end) {
13673 return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
13674 };
13675
13676 var parseAdtsSize = function parseAdtsSize(header, byteIndex) {
13677 var lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
13678 middle = header[byteIndex + 4] << 3,
13679 highTwo = (header[byteIndex + 3] & 0x3) << 11;
13680 return highTwo | middle | lowThree;
13681 };
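// Worked example (illustrative): if the two low bits of byte 3 are 01,
// byte 4 is 0x00 and the top three bits of byte 5 are 000, the frame
// length is (0x01 << 11) | 0 | 0 === 2048 bytes; masking byte 3 before the
// shift is what lets those high two bits contribute at all.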
13682
13683 var parseType$1 = function parseType(header, byteIndex) {
13684 if (header[byteIndex] === 'I'.charCodeAt(0) && header[byteIndex + 1] === 'D'.charCodeAt(0) && header[byteIndex + 2] === '3'.charCodeAt(0)) {
13685 return 'timed-metadata';
13686 } else if ((header[byteIndex] & 0xff) === 0xff && (header[byteIndex + 1] & 0xf0) === 0xf0) {
13687 return 'audio';
13688 }
13689
13690 return null;
13691 };
13692
13693 var parseSampleRate = function parseSampleRate(packet) {
13694 var i = 0;
13695
13696 while (i + 5 < packet.length) {
13697 if (packet[i] !== 0xFF || (packet[i + 1] & 0xF6) !== 0xF0) {
13698 // If a valid header was not found, jump one forward and attempt to
13699 // find a valid ADTS header starting at the next byte
13700 i++;
13701 continue;
13702 }
13703
13704 return ADTS_SAMPLING_FREQUENCIES[(packet[i + 2] & 0x3c) >>> 2];
13705 }
13706
13707 return null;
13708 };
13709
13710 var parseAacTimestamp = function parseAacTimestamp(packet) {
13711 var frameStart, frameSize, frame, frameHeader; // find the start of the first frame and the end of the tag
13712
13713 frameStart = 10;
13714
13715 if (packet[5] & 0x40) {
13716 // advance the frame start past the extended header
13717 frameStart += 4; // header size field
13718
13719 frameStart += parseSyncSafeInteger(packet.subarray(10, 14));
13720 } // parse one or more ID3 frames
13721 // http://id3.org/id3v2.3.0#ID3v2_frame_overview
13722
13723
13724 do {
13725 // determine the number of bytes in this frame
13726 frameSize = parseSyncSafeInteger(packet.subarray(frameStart + 4, frameStart + 8));
13727
13728 if (frameSize < 1) {
13729 return null;
13730 }
13731
13732 frameHeader = String.fromCharCode(packet[frameStart], packet[frameStart + 1], packet[frameStart + 2], packet[frameStart + 3]);
13733
13734 if (frameHeader === 'PRIV') {
13735 frame = packet.subarray(frameStart + 10, frameStart + frameSize + 10);
13736
13737 for (var i = 0; i < frame.byteLength; i++) {
13738 if (frame[i] === 0) {
13739 var owner = parseIso88591(frame, 0, i);
13740
13741 if (owner === 'com.apple.streaming.transportStreamTimestamp') {
13742 var d = frame.subarray(i + 1);
13743 var size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
13744 size *= 4;
13745 size += d[7] & 0x03;
13746 return size;
13747 }
13748
13749 break;
13750 }
13751 }
13752 }
13753
13754 frameStart += 10; // advance past the frame header
13755
13756 frameStart += frameSize; // advance past the frame body
13757 } while (frameStart < packet.byteLength);
13758
13759 return null;
13760 };
13761
13762 var utils = {
13763 isLikelyAacData: isLikelyAacData$2,
13764 parseId3TagSize: parseId3TagSize,
13765 parseAdtsSize: parseAdtsSize,
13766 parseType: parseType$1,
13767 parseSampleRate: parseSampleRate,
13768 parseAacTimestamp: parseAacTimestamp
13769 };
13770
13771 var _AacStream;
13772 /**
13773 * Splits an incoming stream of binary data into ADTS and ID3 Frames.
13774 */
13775
13776
13777 _AacStream = function AacStream() {
13778 var everything = new Uint8Array(),
13779 timeStamp = 0;
13780
13781 _AacStream.prototype.init.call(this);
13782
13783 this.setTimestamp = function (timestamp) {
13784 timeStamp = timestamp;
13785 };
13786
13787 this.push = function (bytes) {
13788 var frameSize = 0,
13789 byteIndex = 0,
13790 bytesLeft,
13791 chunk,
13792 packet,
13793 tempLength; // If there are bytes remaining from the last segment, prepend them to the
13794 // bytes that were pushed in
13795
13796 if (everything.length) {
13797 tempLength = everything.length;
var oldEverything = everything; // keep the buffered bytes; `everything` is reassigned on the next line
13798 everything = new Uint8Array(bytes.byteLength + tempLength);
13799 everything.set(oldEverything.subarray(0, tempLength));
13800 everything.set(bytes, tempLength);
13801 } else {
13802 everything = bytes;
13803 }
13804
13805 while (everything.length - byteIndex >= 3) {
13806 if (everything[byteIndex] === 'I'.charCodeAt(0) && everything[byteIndex + 1] === 'D'.charCodeAt(0) && everything[byteIndex + 2] === '3'.charCodeAt(0)) {
13807 // Exit early because we don't have enough to parse
13808 // the ID3 tag header
13809 if (everything.length - byteIndex < 10) {
13810 break;
13811 } // check framesize
13812
13813
13814 frameSize = utils.parseId3TagSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
13815 // to emit a full packet
13816 // Add to byteIndex to support multiple ID3 tags in sequence
13817
13818 if (byteIndex + frameSize > everything.length) {
13819 break;
13820 }
13821
13822 chunk = {
13823 type: 'timed-metadata',
13824 data: everything.subarray(byteIndex, byteIndex + frameSize)
13825 };
13826 this.trigger('data', chunk);
13827 byteIndex += frameSize;
13828 continue;
13829 } else if ((everything[byteIndex] & 0xff) === 0xff && (everything[byteIndex + 1] & 0xf0) === 0xf0) {
13830 // Exit early because we don't have enough to parse
13831 // the ADTS frame header
13832 if (everything.length - byteIndex < 7) {
13833 break;
13834 }
13835
13836 frameSize = utils.parseAdtsSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
13837 // to emit a full packet
13838
13839 if (byteIndex + frameSize > everything.length) {
13840 break;
13841 }
13842
13843 packet = {
13844 type: 'audio',
13845 data: everything.subarray(byteIndex, byteIndex + frameSize),
13846 pts: timeStamp,
13847 dts: timeStamp
13848 };
13849 this.trigger('data', packet);
13850 byteIndex += frameSize;
13851 continue;
13852 }
13853
13854 byteIndex++;
13855 }
13856
13857 bytesLeft = everything.length - byteIndex;
13858
13859 if (bytesLeft > 0) {
13860 everything = everything.subarray(byteIndex);
13861 } else {
13862 everything = new Uint8Array();
13863 }
13864 };
13865
13866 this.reset = function () {
13867 everything = new Uint8Array();
13868 this.trigger('reset');
13869 };
13870
13871 this.endTimeline = function () {
13872 everything = new Uint8Array();
13873 this.trigger('endedtimeline');
13874 };
13875 };
13876
13877 _AacStream.prototype = new stream();
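// Minimal usage sketch (illustrative; `aacBytes` is a hypothetical
// Uint8Array of raw ADTS/ID3 data):
//
//   var aacStream = new _AacStream();
//   aacStream.on('data', function (chunk) {
//     // chunk.type is 'timed-metadata' or 'audio'
//   });
//   aacStream.push(aacBytes);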
13878 var aac = _AacStream; // constants
13879
13880 var AUDIO_PROPERTIES = ['audioobjecttype', 'channelcount', 'samplerate', 'samplingfrequencyindex', 'samplesize'];
13881 var audioProperties = AUDIO_PROPERTIES;
13882 var VIDEO_PROPERTIES = ['width', 'height', 'profileIdc', 'levelIdc', 'profileCompatibility', 'sarRatio'];
13883 var videoProperties = VIDEO_PROPERTIES;
13884 var H264Stream = h264.H264Stream;
13885 var isLikelyAacData$1 = utils.isLikelyAacData;
13886 var ONE_SECOND_IN_TS$1 = clock.ONE_SECOND_IN_TS; // object types
13887
13888 var _VideoSegmentStream, _AudioSegmentStream, _Transmuxer, _CoalesceStream;
13889 /**
13890 * Compare two arrays (even typed arrays) for sameness
13891 */
13892
13893
13894 var arrayEquals = function arrayEquals(a, b) {
13895 var i;
13896
13897 if (a.length !== b.length) {
13898 return false;
13899 } // compare the value of each element in the array
13900
13901
13902 for (i = 0; i < a.length; i++) {
13903 if (a[i] !== b[i]) {
13904 return false;
13905 }
13906 }
13907
13908 return true;
13909 };
13910
13911 var generateSegmentTimingInfo = function generateSegmentTimingInfo(baseMediaDecodeTime, startDts, startPts, endDts, endPts, prependedContentDuration) {
13912 var ptsOffsetFromDts = startPts - startDts,
13913 decodeDuration = endDts - startDts,
13914 presentationDuration = endPts - startPts; // The PTS and DTS values are based on the actual stream times from the segment,
13915 // however, the player time values will reflect a start from the baseMediaDecodeTime.
13916 // In order to provide relevant values for the player times, base timing info on the
13917 // baseMediaDecodeTime and the DTS and PTS durations of the segment.
13918
13919 return {
13920 start: {
13921 dts: baseMediaDecodeTime,
13922 pts: baseMediaDecodeTime + ptsOffsetFromDts
13923 },
13924 end: {
13925 dts: baseMediaDecodeTime + decodeDuration,
13926 pts: baseMediaDecodeTime + presentationDuration
13927 },
13928 prependedContentDuration: prependedContentDuration,
13929 baseMediaDecodeTime: baseMediaDecodeTime
13930 };
13931 };
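// Worked example (illustrative, 90kHz clock values): with
// baseMediaDecodeTime = 90000, startDts = 9000, startPts = 12000,
// endDts = 189000 and endPts = 192000, the PTS offset from DTS is 3000 and
// both durations are 180000, yielding
// { start: { dts: 90000, pts: 93000 }, end: { dts: 270000, pts: 270000 }, ... }.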
13932 /**
13933 * Constructs a single-track, ISO BMFF media segment from AAC data
13934 * events. The output of this stream can be fed to a SourceBuffer
13935 * configured with a suitable initialization segment.
13936 * @param track {object} track metadata configuration
13937 * @param options {object} transmuxer options object
13938 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
13939 * in the source; false to adjust the first segment to start at 0.
13940 */
13941
13942
13943 _AudioSegmentStream = function AudioSegmentStream(track, options) {
13944 var adtsFrames = [],
13945 sequenceNumber = 0,
13946 earliestAllowedDts = 0,
13947 audioAppendStartTs = 0,
13948 videoBaseMediaDecodeTime = Infinity;
13949 options = options || {};
13950
13951 _AudioSegmentStream.prototype.init.call(this);
13952
13953 this.push = function (data) {
13954 trackDecodeInfo.collectDtsInfo(track, data);
13955
13956 if (track) {
13957 audioProperties.forEach(function (prop) {
13958 track[prop] = data[prop];
13959 });
13960 } // buffer audio data until end() is called
13961
13962
13963 adtsFrames.push(data);
13964 };
13965
13966 this.setEarliestDts = function (earliestDts) {
13967 earliestAllowedDts = earliestDts;
13968 };
13969
13970 this.setVideoBaseMediaDecodeTime = function (baseMediaDecodeTime) {
13971 videoBaseMediaDecodeTime = baseMediaDecodeTime;
13972 };
13973
13974 this.setAudioAppendStart = function (timestamp) {
13975 audioAppendStartTs = timestamp;
13976 };
13977
13978 this.flush = function () {
13979 var frames, moof, mdat, boxes, frameDuration, segmentDuration, videoClockCyclesOfSilencePrefixed; // return early if no audio data has been observed
13980
13981 if (adtsFrames.length === 0) {
13982 this.trigger('done', 'AudioSegmentStream');
13983 return;
13984 }
13985
13986 frames = audioFrameUtils.trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts);
13987 track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps); // the amount of silence prefixed below is measured in video clock cycles rather than audio clock cycles
13988
13989 videoClockCyclesOfSilencePrefixed = audioFrameUtils.prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime); // we have to build the index from byte locations to
13990 // samples (that is, adts frames) in the audio data
13991
13992 track.samples = audioFrameUtils.generateSampleTable(frames); // concatenate the audio data to construct the mdat
13993
13994 mdat = mp4Generator.mdat(audioFrameUtils.concatenateFrameData(frames));
13995 adtsFrames = [];
13996 moof = mp4Generator.moof(sequenceNumber, [track]);
13997 boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // bump the sequence number for next time
13998
13999 sequenceNumber++;
14000 boxes.set(moof);
14001 boxes.set(mdat, moof.byteLength);
14002 trackDecodeInfo.clearDtsInfo(track);
14003 frameDuration = Math.ceil(ONE_SECOND_IN_TS$1 * 1024 / track.samplerate); // TODO this check was added to maintain backwards compatibility (particularly with
14004 // tests) on adding the timingInfo event. However, it seems unlikely that there's a
14005 // valid use-case where an init segment/data should be triggered without associated
14006 // frames. Leaving for now, but should be looked into.
14007
14008 if (frames.length) {
14009 segmentDuration = frames.length * frameDuration;
14010 this.trigger('segmentTimingInfo', generateSegmentTimingInfo( // The audio track's baseMediaDecodeTime is in audio clock cycles, but the
14011 // frame info is in video clock cycles. Convert to match expectation of
14012 // listeners (that all timestamps will be based on video clock cycles).
14013 clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate), // frame times are already in video clock, as is segment duration
14014 frames[0].dts, frames[0].pts, frames[0].dts + segmentDuration, frames[0].pts + segmentDuration, videoClockCyclesOfSilencePrefixed || 0));
14015 this.trigger('timingInfo', {
14016 start: frames[0].pts,
14017 end: frames[0].pts + segmentDuration
14018 });
14019 }
14020
14021 this.trigger('data', {
14022 track: track,
14023 boxes: boxes
14024 });
14025 this.trigger('done', 'AudioSegmentStream');
14026 };
14027
14028 this.reset = function () {
14029 trackDecodeInfo.clearDtsInfo(track);
14030 adtsFrames = [];
14031 this.trigger('reset');
14032 };
14033 };
14034
14035 _AudioSegmentStream.prototype = new stream();
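// Illustrative sketch (not part of the original bundle): flush() above lays
// each emitted fragment out as [moof][mdat] in one contiguous buffer. The
// same pattern in isolation, assuming `moof` and `mdat` are Uint8Arrays
// produced by an MP4 generator:
var exampleConcatFragment = function (moof, mdat) {
  var fragment = new Uint8Array(moof.byteLength + mdat.byteLength);
  fragment.set(moof); // movie fragment box first
  fragment.set(mdat, moof.byteLength); // media data box immediately after
  return fragment;
};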
14036 /**
14037 * Constructs a single-track, ISO BMFF media segment from H264 data
14038 * events. The output of this stream can be fed to a SourceBuffer
14039 * configured with a suitable initialization segment.
14040 * @param track {object} track metadata configuration
14041 * @param options {object} transmuxer options object
14042 * @param options.alignGopsAtEnd {boolean} If true, start from the end of the
14043 * gopsToAlignWith list when attempting to align gop pts
14044 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
14045 * in the source; false to adjust the first segment to start at 0.
14046 */
14047
14048 _VideoSegmentStream = function VideoSegmentStream(track, options) {
14049 var sequenceNumber = 0,
14050 nalUnits = [],
14051 gopsToAlignWith = [],
14052 config,
14053 pps;
14054 options = options || {};
14055
14056 _VideoSegmentStream.prototype.init.call(this);
14057
14058 delete track.minPTS;
14059 this.gopCache_ = [];
14060 /**
14061 * Constructs an ISO BMFF segment given H264 nalUnits
14062 * @param {Object} nalUnit A data event representing a nalUnit
14063 * @param {String} nalUnit.nalUnitType
14064 * @param {Object} nalUnit.config Properties for a mp4 track
14065 * @param {Uint8Array} nalUnit.data The nalUnit bytes
14066 * @see lib/codecs/h264.js
14067 **/
14068
14069 this.push = function (nalUnit) {
14070 trackDecodeInfo.collectDtsInfo(track, nalUnit); // record the track config
14071
14072 if (nalUnit.nalUnitType === 'seq_parameter_set_rbsp' && !config) {
14073 config = nalUnit.config;
14074 track.sps = [nalUnit.data];
14075 videoProperties.forEach(function (prop) {
14076 track[prop] = config[prop];
14077 }, this);
14078 }
14079
14080 if (nalUnit.nalUnitType === 'pic_parameter_set_rbsp' && !pps) {
14081 pps = nalUnit.data;
14082 track.pps = [nalUnit.data];
14083 } // buffer video until flush() is called
14084
14085
14086 nalUnits.push(nalUnit);
14087 };
14088 /**
14089 * Pass constructed ISO BMFF track and boxes on to the
14090 * next stream in the pipeline
14091 **/
14092
14093
14094 this.flush = function () {
14095 var frames,
14096 gopForFusion,
14097 gops,
14098 moof,
14099 mdat,
14100 boxes,
14101 prependedContentDuration = 0,
14102 firstGop,
14103 lastGop; // Throw away nalUnits at the start of the byte stream until
14104 // we find the first AUD
14105
14106 while (nalUnits.length) {
14107 if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
14108 break;
14109 }
14110
14111 nalUnits.shift();
14112 } // Return early if no video data has been observed
14113
14114
14115 if (nalUnits.length === 0) {
14116 this.resetStream_();
14117 this.trigger('done', 'VideoSegmentStream');
14118 return;
14119 } // Organize the raw nal-units into arrays that represent
14120 // higher-level constructs such as frames and gops
14121 // (group-of-pictures)
14122
14123
14124 frames = frameUtils.groupNalsIntoFrames(nalUnits);
14125 gops = frameUtils.groupFramesIntoGops(frames); // If the first frame of this fragment is not a keyframe we have
14126 // a problem since MSE (on Chrome) requires a leading keyframe.
14127 //
14128 // We have two approaches to repairing this situation:
14129 // 1) GOP-FUSION:
14130 // This is where we keep track of the GOPS (group-of-pictures)
14131 // from previous fragments and attempt to find one that we can
14132 // prepend to the current fragment in order to create a valid
14133 // fragment.
14134 // 2) KEYFRAME-PULLING:
14135 // Here we search for the first keyframe in the fragment and
14136 // throw away all the frames between the start of the fragment
14137 // and that keyframe. We then extend the duration and pull the
14138 // PTS of the keyframe forward so that it covers the time range
14139 // of the frames that were disposed of.
14140 //
14141 // #1 is far preferable to #2, which can cause "stuttering", but
14142 // requires more things to be just right.
14143
14144 if (!gops[0][0].keyFrame) {
14145 // Search for a gop for fusion from our gopCache
14146 gopForFusion = this.getGopForFusion_(nalUnits[0], track);
14147
14148 if (gopForFusion) {
14149 // in order to provide more accurate timing information about the segment, save
14150 // the number of seconds prepended to the original segment due to GOP fusion
14151 prependedContentDuration = gopForFusion.duration;
14152 gops.unshift(gopForFusion); // Adjust Gops' metadata to account for the inclusion of the
14153 // new gop at the beginning
14154
14155 gops.byteLength += gopForFusion.byteLength;
14156 gops.nalCount += gopForFusion.nalCount;
14157 gops.pts = gopForFusion.pts;
14158 gops.dts = gopForFusion.dts;
14159 gops.duration += gopForFusion.duration;
14160 } else {
14161 // If we didn't find a candidate gop fall back to keyframe-pulling
14162 gops = frameUtils.extendFirstKeyFrame(gops);
14163 }
14164 } // Trim gops to align with gopsToAlignWith
14165
14166
14167 if (gopsToAlignWith.length) {
14168 var alignedGops;
14169
14170 if (options.alignGopsAtEnd) {
14171 alignedGops = this.alignGopsAtEnd_(gops);
14172 } else {
14173 alignedGops = this.alignGopsAtStart_(gops);
14174 }
14175
14176 if (!alignedGops) {
14177 // save all the nals in the last GOP into the gop cache
14178 this.gopCache_.unshift({
14179 gop: gops.pop(),
14180 pps: track.pps,
14181 sps: track.sps
14182 }); // Keep a maximum of 6 GOPs in the cache
14183
14184 this.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnits
14185
14186 nalUnits = []; // return early; no gops can be aligned with the desired gopsToAlignWith
14187
14188 this.resetStream_();
14189 this.trigger('done', 'VideoSegmentStream');
14190 return;
14191 } // Some gops were trimmed. Clear dts info so minSegmentDts and pts are correct
14192 // when recalculated before sending off to CoalesceStream
14193
14194
14195 trackDecodeInfo.clearDtsInfo(track);
14196 gops = alignedGops;
14197 }
14198
14199 trackDecodeInfo.collectDtsInfo(track, gops); // First, we have to build the index from byte locations to
14200 // samples (that is, frames) in the video data
14201
14202 track.samples = frameUtils.generateSampleTable(gops); // Concatenate the video data and construct the mdat
14203
14204 mdat = mp4Generator.mdat(frameUtils.concatenateNalData(gops));
14205 track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps);
14206 this.trigger('processedGopsInfo', gops.map(function (gop) {
14207 return {
14208 pts: gop.pts,
14209 dts: gop.dts,
14210 byteLength: gop.byteLength
14211 };
14212 }));
14213 firstGop = gops[0];
14214 lastGop = gops[gops.length - 1];
14215 this.trigger('segmentTimingInfo', generateSegmentTimingInfo(track.baseMediaDecodeTime, firstGop.dts, firstGop.pts, lastGop.dts + lastGop.duration, lastGop.pts + lastGop.duration, prependedContentDuration));
14216 this.trigger('timingInfo', {
14217 start: gops[0].pts,
14218 end: gops[gops.length - 1].pts + gops[gops.length - 1].duration
14219 }); // save all the nals in the last GOP into the gop cache
14220
14221 this.gopCache_.unshift({
14222 gop: gops.pop(),
14223 pps: track.pps,
14224 sps: track.sps
14225 }); // Keep a maximum of 6 GOPs in the cache
14226
14227 this.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnits
14228
14229 nalUnits = [];
14230 this.trigger('baseMediaDecodeTime', track.baseMediaDecodeTime);
14231 this.trigger('timelineStartInfo', track.timelineStartInfo);
14232 moof = mp4Generator.moof(sequenceNumber, [track]); // it would be great to allocate this array up front instead of
14233 // throwing away hundreds of media segment fragments
14234
14235 boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // Bump the sequence number for next time
14236
14237 sequenceNumber++;
14238 boxes.set(moof);
14239 boxes.set(mdat, moof.byteLength);
14240 this.trigger('data', {
14241 track: track,
14242 boxes: boxes
14243 });
14244 this.resetStream_(); // Continue with the flush process now
14245
14246 this.trigger('done', 'VideoSegmentStream');
14247 };
14248
14249 this.reset = function () {
14250 this.resetStream_();
14251 nalUnits = [];
14252 this.gopCache_.length = 0;
14253 gopsToAlignWith.length = 0;
14254 this.trigger('reset');
14255 };
14256
14257 this.resetStream_ = function () {
14258 trackDecodeInfo.clearDtsInfo(track); // reset config and pps because they may differ across segments
14259 // for instance, when we are rendition switching
14260
14261 config = undefined;
14262 pps = undefined;
14263 }; // Search for a candidate Gop for gop-fusion from the gop cache and
14264 // return it or return null if no good candidate was found
14265
14266
14267 this.getGopForFusion_ = function (nalUnit) {
14268 var halfSecond = 45000,
14269 // Half a second in a 90kHz clock
14270 allowableOverlap = 10000,
14271 // About 3 frames @ 30fps
14272 nearestDistance = Infinity,
14273 dtsDistance,
14274 nearestGopObj,
14275 currentGop,
14276 currentGopObj,
14277 i; // Search for the GOP nearest to the beginning of this nal unit
14278
14279 for (i = 0; i < this.gopCache_.length; i++) {
14280 currentGopObj = this.gopCache_[i];
14281 currentGop = currentGopObj.gop; // Reject Gops with different SPS or PPS
14282
14283 if (!(track.pps && arrayEquals(track.pps[0], currentGopObj.pps[0])) || !(track.sps && arrayEquals(track.sps[0], currentGopObj.sps[0]))) {
14284 continue;
14285 } // Reject Gops that would require a negative baseMediaDecodeTime
14286
14287
14288 if (currentGop.dts < track.timelineStartInfo.dts) {
14289 continue;
14290 } // The distance between the end of the gop and the start of the nalUnit
14291
14292
14293 dtsDistance = nalUnit.dts - currentGop.dts - currentGop.duration; // Only consider GOPs that start before the nal unit and end within
14294 // a half-second of the nal unit
14295
14296 if (dtsDistance >= -allowableOverlap && dtsDistance <= halfSecond) {
14297 // Always use the closest GOP we found if there is more than
14298 // one candidate
14299 if (!nearestGopObj || nearestDistance > dtsDistance) {
14300 nearestGopObj = currentGopObj;
14301 nearestDistance = dtsDistance;
14302 }
14303 }
14304 }
14305
14306 if (nearestGopObj) {
14307 return nearestGopObj.gop;
14308 }
14309
14310 return null;
14311 }; // trim gop list to the first gop found that has a matching pts with a gop in the list
14312 // of gopsToAlignWith starting from the START of the list
14313
14314
14315 this.alignGopsAtStart_ = function (gops) {
14316 var alignIndex, gopIndex, align, gop, byteLength, nalCount, duration, alignedGops;
14317 byteLength = gops.byteLength;
14318 nalCount = gops.nalCount;
14319 duration = gops.duration;
14320 alignIndex = gopIndex = 0;
14321
14322 while (alignIndex < gopsToAlignWith.length && gopIndex < gops.length) {
14323 align = gopsToAlignWith[alignIndex];
14324 gop = gops[gopIndex];
14325
14326 if (align.pts === gop.pts) {
14327 break;
14328 }
14329
14330 if (gop.pts > align.pts) {
14331 // this current gop starts after the current gop we want to align on, so increment
14332 // align index
14333 alignIndex++;
14334 continue;
14335 } // current gop starts before the current gop we want to align on, so increment gop
14336 // index
14337
14338
14339 gopIndex++;
14340 byteLength -= gop.byteLength;
14341 nalCount -= gop.nalCount;
14342 duration -= gop.duration;
14343 }
14344
14345 if (gopIndex === 0) {
14346 // no gops to trim
14347 return gops;
14348 }
14349
14350 if (gopIndex === gops.length) {
14351 // all gops trimmed, skip appending all gops
14352 return null;
14353 }
14354
14355 alignedGops = gops.slice(gopIndex);
14356 alignedGops.byteLength = byteLength;
14357 alignedGops.duration = duration;
14358 alignedGops.nalCount = nalCount;
14359 alignedGops.pts = alignedGops[0].pts;
14360 alignedGops.dts = alignedGops[0].dts;
14361 return alignedGops;
14362 }; // trim gop list to the first gop found that has a matching pts with a gop in the list
14363 // of gopsToAlignWith starting from the END of the list
14364
14365
14366 this.alignGopsAtEnd_ = function (gops) {
14367 var alignIndex, gopIndex, align, gop, alignEndIndex, matchFound;
14368 alignIndex = gopsToAlignWith.length - 1;
14369 gopIndex = gops.length - 1;
14370 alignEndIndex = null;
14371 matchFound = false;
14372
14373 while (alignIndex >= 0 && gopIndex >= 0) {
14374 align = gopsToAlignWith[alignIndex];
14375 gop = gops[gopIndex];
14376
14377 if (align.pts === gop.pts) {
14378 matchFound = true;
14379 break;
14380 }
14381
14382 if (align.pts > gop.pts) {
14383 alignIndex--;
14384 continue;
14385 }
14386
14387 if (alignIndex === gopsToAlignWith.length - 1) {
14388 // gop.pts is greater than the last alignment candidate. If no match is found
14389 // by the end of this loop, we still want to append gops that come after this
14390 // point
14391 alignEndIndex = gopIndex;
14392 }
14393
14394 gopIndex--;
14395 }
14396
14397 if (!matchFound && alignEndIndex === null) {
14398 return null;
14399 }
14400
14401 var trimIndex;
14402
14403 if (matchFound) {
14404 trimIndex = gopIndex;
14405 } else {
14406 trimIndex = alignEndIndex;
14407 }
14408
14409 if (trimIndex === 0) {
14410 return gops;
14411 }
14412
14413 var alignedGops = gops.slice(trimIndex);
14414 var metadata = alignedGops.reduce(function (total, gop) {
14415 total.byteLength += gop.byteLength;
14416 total.duration += gop.duration;
14417 total.nalCount += gop.nalCount;
14418 return total;
14419 }, {
14420 byteLength: 0,
14421 duration: 0,
14422 nalCount: 0
14423 });
14424 alignedGops.byteLength = metadata.byteLength;
14425 alignedGops.duration = metadata.duration;
14426 alignedGops.nalCount = metadata.nalCount;
14427 alignedGops.pts = alignedGops[0].pts;
14428 alignedGops.dts = alignedGops[0].dts;
14429 return alignedGops;
14430 };
14431
14432 this.alignGopsWith = function (newGopsToAlignWith) {
14433 gopsToAlignWith = newGopsToAlignWith;
14434 };
14435 };
14436
14437 _VideoSegmentStream.prototype = new stream();
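// Illustrative sketch (not part of the original bundle): alignGopsAtStart_
// above walks two pts-ordered lists in lockstep, discarding leading gops
// until a matching pts is found. The core two-pointer idea in isolation,
// assuming each entry is an object with a numeric `pts`:
var exampleFindAlignedStart = function (gopsToAlign, gops) {
  var alignIndex = 0,
      gopIndex = 0;

  while (alignIndex < gopsToAlign.length && gopIndex < gops.length) {
    if (gopsToAlign[alignIndex].pts === gops[gopIndex].pts) {
      return gopIndex; // index of the first gop to keep
    }

    if (gops[gopIndex].pts > gopsToAlign[alignIndex].pts) {
      alignIndex++; // alignment candidate is behind this gop; advance it
    } else {
      gopIndex++; // this gop is behind the candidate; drop it
    }
  }

  return null; // no alignment point exists
};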
14438 /**
14439 * A Stream that can combine multiple streams (i.e. audio and video)
14440 * into a single output segment for MSE. Also supports audio-only
14441 * and video-only streams.
14442 * @param options {object} transmuxer options object
14443 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
14444 * in the source; false to adjust the first segment to start at media timeline start.
14445 */
14446
14447 _CoalesceStream = function CoalesceStream(options, metadataStream) {
14448 // Number of Tracks per output segment
14449 // If greater than 1, we combine multiple
14450 // tracks into a single segment
14451 this.numberOfTracks = 0;
14452 this.metadataStream = metadataStream;
14453 options = options || {};
14454
14455 if (typeof options.remux !== 'undefined') {
14456 this.remuxTracks = !!options.remux;
14457 } else {
14458 this.remuxTracks = true;
14459 }
14460
14461 if (typeof options.keepOriginalTimestamps === 'boolean') {
14462 this.keepOriginalTimestamps = options.keepOriginalTimestamps;
14463 } else {
14464 this.keepOriginalTimestamps = false;
14465 }
14466
14467 this.pendingTracks = [];
14468 this.videoTrack = null;
14469 this.pendingBoxes = [];
14470 this.pendingCaptions = [];
14471 this.pendingMetadata = [];
14472 this.pendingBytes = 0;
14473 this.emittedTracks = 0;
14474
14475 _CoalesceStream.prototype.init.call(this); // Take output from multiple
14476
14477
14478 this.push = function (output) {
14479 // buffer incoming captions until the associated video segment
14480 // finishes
14481 if (output.text) {
14482 return this.pendingCaptions.push(output);
14483 } // buffer incoming id3 tags until the final flush
14484
14485
14486 if (output.frames) {
14487 return this.pendingMetadata.push(output);
14488 } // Add this track to the list of pending tracks and store
14489 // important information required for the construction of
14490 // the final segment
14491
14492
14493 this.pendingTracks.push(output.track);
14494 this.pendingBytes += output.boxes.byteLength; // TODO: is there an issue for this against Chrome?
14495 // We unshift audio and push video because
14496 // as of Chrome 75, when switching from
14497 // one init segment to another, if the video
14498 // mdat does not appear after the audio mdat,
14499 // only audio will play for the duration of our transmux.
14500
14501 if (output.track.type === 'video') {
14502 this.videoTrack = output.track;
14503 this.pendingBoxes.push(output.boxes);
14504 }
14505
14506 if (output.track.type === 'audio') {
14507 this.audioTrack = output.track;
14508 this.pendingBoxes.unshift(output.boxes);
14509 }
14510 };
14511 };
14512
14513 _CoalesceStream.prototype = new stream();
14514
14515 _CoalesceStream.prototype.flush = function (flushSource) {
14516 var offset = 0,
14517 event = {
14518 captions: [],
14519 captionStreams: {},
14520 metadata: [],
14521 info: {}
14522 },
14523 caption,
14524 id3,
14525 initSegment,
14526 timelineStartPts = 0,
14527 i;
14528
14529 if (this.pendingTracks.length < this.numberOfTracks) {
14530 if (flushSource !== 'VideoSegmentStream' && flushSource !== 'AudioSegmentStream') {
14531 // Return because we haven't received a flush from a data-generating
14532 // portion of the segment (meaning that we have only received metadata
14533 // or captions.)
14534 return;
14535 } else if (this.remuxTracks) {
14536 // Return until we have enough tracks from the pipeline to remux (if we
14537 // are remuxing audio and video into a single MP4)
14538 return;
14539 } else if (this.pendingTracks.length === 0) {
14540 // In the case where we receive a flush without any data having been
14541 // received we consider it an emitted track for the purposes of coalescing
14542 // `done` events.
14543 // We do this for the case where there is an audio and video track in the
14544 // segment but no audio data. (seen in several playlists with alternate
14545 // audio tracks and no audio present in the main TS segments.)
14546 this.emittedTracks++;
14547
14548 if (this.emittedTracks >= this.numberOfTracks) {
14549 this.trigger('done');
14550 this.emittedTracks = 0;
14551 }
14552
14553 return;
14554 }
14555 }
14556
14557 if (this.videoTrack) {
14558 timelineStartPts = this.videoTrack.timelineStartInfo.pts;
14559 videoProperties.forEach(function (prop) {
14560 event.info[prop] = this.videoTrack[prop];
14561 }, this);
14562 } else if (this.audioTrack) {
14563 timelineStartPts = this.audioTrack.timelineStartInfo.pts;
14564 audioProperties.forEach(function (prop) {
14565 event.info[prop] = this.audioTrack[prop];
14566 }, this);
14567 }
14568
14569 if (this.videoTrack || this.audioTrack) {
14570 if (this.pendingTracks.length === 1) {
14571 event.type = this.pendingTracks[0].type;
14572 } else {
14573 event.type = 'combined';
14574 }
14575
14576 this.emittedTracks += this.pendingTracks.length;
14577 initSegment = mp4Generator.initSegment(this.pendingTracks); // Create a new typed array to hold the init segment
14578
14579 event.initSegment = new Uint8Array(initSegment.byteLength); // Create an init segment containing a moov
14580 // and track definitions
14581
14582 event.initSegment.set(initSegment); // Create a new typed array to hold the moof+mdats
14583
14584 event.data = new Uint8Array(this.pendingBytes); // Append each moof+mdat (one per track) together
14585
14586 for (i = 0; i < this.pendingBoxes.length; i++) {
14587 event.data.set(this.pendingBoxes[i], offset);
14588 offset += this.pendingBoxes[i].byteLength;
14589 } // Translate caption PTS times into second offsets to match the
14590 // video timeline for the segment, and add track info
14591
14592
14593 for (i = 0; i < this.pendingCaptions.length; i++) {
14594 caption = this.pendingCaptions[i];
14595 caption.startTime = clock.metadataTsToSeconds(caption.startPts, timelineStartPts, this.keepOriginalTimestamps);
14596 caption.endTime = clock.metadataTsToSeconds(caption.endPts, timelineStartPts, this.keepOriginalTimestamps);
14597 event.captionStreams[caption.stream] = true;
14598 event.captions.push(caption);
14599 } // Translate ID3 frame PTS times into second offsets to match the
14600 // video timeline for the segment
14601
14602
14603 for (i = 0; i < this.pendingMetadata.length; i++) {
14604 id3 = this.pendingMetadata[i];
14605 id3.cueTime = clock.metadataTsToSeconds(id3.pts, timelineStartPts, this.keepOriginalTimestamps);
14606 event.metadata.push(id3);
14607 } // We add this to every single emitted segment even though we only need
14608 // it for the first
14609
14610
14611 event.metadata.dispatchType = this.metadataStream.dispatchType; // Reset stream state
14612
14613 this.pendingTracks.length = 0;
14614 this.videoTrack = null;
14615 this.pendingBoxes.length = 0;
14616 this.pendingCaptions.length = 0;
14617 this.pendingBytes = 0;
14618 this.pendingMetadata.length = 0; // Emit the built segment
14619 // We include captions and ID3 tags for backwards compatibility,
14620 // ideally we should send only video and audio in the data event
14621
14622 this.trigger('data', event); // Emit each caption to the outside world
14623 // Ideally, this would happen immediately on parsing captions,
14624 // but we need to ensure that video data is sent back first
14625 // so that caption timing can be adjusted to match video timing
14626
14627 for (i = 0; i < event.captions.length; i++) {
14628 caption = event.captions[i];
14629 this.trigger('caption', caption);
14630 } // Emit each id3 tag to the outside world
14631 // Ideally, this would happen immediately on parsing the tag,
14632 // but we need to ensure that video data is sent back first
14633 // so that ID3 frame timing can be adjusted to match video timing
14634
14635
14636 for (i = 0; i < event.metadata.length; i++) {
14637 id3 = event.metadata[i];
14638 this.trigger('id3Frame', id3);
14639 }
14640 } // Only emit `done` if all tracks have been flushed and emitted
14641
14642
14643 if (this.emittedTracks >= this.numberOfTracks) {
14644 this.trigger('done');
14645 this.emittedTracks = 0;
14646 }
14647 };
14648
14649 _CoalesceStream.prototype.setRemux = function (val) {
14650 this.remuxTracks = val;
14651 };
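// Illustrative sketch (not part of the original bundle): the caption and ID3
// translation in flush() above turns 90kHz PTS values into second offsets
// relative to the segment's timeline start. A minimal version of that math,
// mirroring clock.metadataTsToSeconds and assuming a 90kHz clock:
var exampleMetadataTsToSeconds = function (ts, timelineStartPts, keepOriginalTimestamps) {
  var NINETY_KHZ = 90000;
  return keepOriginalTimestamps ? ts / NINETY_KHZ : (ts - timelineStartPts) / NINETY_KHZ;
}; // e.g. exampleMetadataTsToSeconds(270000, 90000, false) === 2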
14652 /**
14653 * A Stream that expects MP2T binary data as input and produces
14654 * corresponding media segments, suitable for use with Media Source
14655 * Extensions (MSE) implementations that support the ISO BMFF byte
14656 * stream format, like Chrome.
14657 */
14658
14659
14660 _Transmuxer = function Transmuxer(options) {
14661 var self = this,
14662 hasFlushed = true,
14663 videoTrack,
14664 audioTrack;
14665
14666 _Transmuxer.prototype.init.call(this);
14667
14668 options = options || {};
14669 this.baseMediaDecodeTime = options.baseMediaDecodeTime || 0;
14670 this.transmuxPipeline_ = {};
14671
14672 this.setupAacPipeline = function () {
14673 var pipeline = {};
14674 this.transmuxPipeline_ = pipeline;
14675 pipeline.type = 'aac';
14676 pipeline.metadataStream = new m2ts_1.MetadataStream(); // set up the parsing pipeline
14677
14678 pipeline.aacStream = new aac();
14679 pipeline.audioTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('audio');
14680 pipeline.timedMetadataTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('timed-metadata');
14681 pipeline.adtsStream = new adts();
14682 pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
14683 pipeline.headOfPipeline = pipeline.aacStream;
14684 pipeline.aacStream.pipe(pipeline.audioTimestampRolloverStream).pipe(pipeline.adtsStream);
14685 pipeline.aacStream.pipe(pipeline.timedMetadataTimestampRolloverStream).pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream);
14686 pipeline.metadataStream.on('timestamp', function (frame) {
14687 pipeline.aacStream.setTimestamp(frame.timeStamp);
14688 });
14689 pipeline.aacStream.on('data', function (data) {
14690 if (data.type !== 'timed-metadata' && data.type !== 'audio' || pipeline.audioSegmentStream) {
14691 return;
14692 }
14693
14694 audioTrack = audioTrack || {
14695 timelineStartInfo: {
14696 baseMediaDecodeTime: self.baseMediaDecodeTime
14697 },
14698 codec: 'adts',
14699 type: 'audio'
14700 }; // hook up the audio segment stream to the first track with aac data
14701
14702 pipeline.coalesceStream.numberOfTracks++;
14703 pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
14704 pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo')); // Set up the final part of the audio pipeline
14705
14706 pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream); // emit pmt info
14707
14708 self.trigger('trackinfo', {
14709 hasAudio: !!audioTrack,
14710 hasVideo: !!videoTrack
14711 });
14712 }); // Re-emit any data coming from the coalesce stream to the outside world
14713
14714 pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data')); // Let the consumer know we have finished flushing the entire pipeline
14715
14716 pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
14717 };
14718
14719 this.setupTsPipeline = function () {
14720 var pipeline = {};
14721 this.transmuxPipeline_ = pipeline;
14722 pipeline.type = 'ts';
14723 pipeline.metadataStream = new m2ts_1.MetadataStream(); // set up the parsing pipeline
14724
14725 pipeline.packetStream = new m2ts_1.TransportPacketStream();
14726 pipeline.parseStream = new m2ts_1.TransportParseStream();
14727 pipeline.elementaryStream = new m2ts_1.ElementaryStream();
14728 pipeline.timestampRolloverStream = new m2ts_1.TimestampRolloverStream();
14729 pipeline.adtsStream = new adts();
14730 pipeline.h264Stream = new H264Stream();
14731 pipeline.captionStream = new m2ts_1.CaptionStream(options);
14732 pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
14733 pipeline.headOfPipeline = pipeline.packetStream; // disassemble MPEG2-TS packets into elementary streams
14734
14735 pipeline.packetStream.pipe(pipeline.parseStream).pipe(pipeline.elementaryStream).pipe(pipeline.timestampRolloverStream); // !!THIS ORDER IS IMPORTANT!!
14736 // demux the streams
14737
14738 pipeline.timestampRolloverStream.pipe(pipeline.h264Stream);
14739 pipeline.timestampRolloverStream.pipe(pipeline.adtsStream);
14740 pipeline.timestampRolloverStream.pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream); // Hook up CEA-608/708 caption stream
14741
14742 pipeline.h264Stream.pipe(pipeline.captionStream).pipe(pipeline.coalesceStream);
14743 pipeline.elementaryStream.on('data', function (data) {
14744 var i;
14745
14746 if (data.type === 'metadata') {
14747 i = data.tracks.length; // scan the tracks listed in the metadata
14748
14749 while (i--) {
14750 if (!videoTrack && data.tracks[i].type === 'video') {
14751 videoTrack = data.tracks[i];
14752 videoTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
14753 } else if (!audioTrack && data.tracks[i].type === 'audio') {
14754 audioTrack = data.tracks[i];
14755 audioTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
14756 }
14757 } // hook up the video segment stream to the first track with h264 data
14758
14759
14760 if (videoTrack && !pipeline.videoSegmentStream) {
14761 pipeline.coalesceStream.numberOfTracks++;
14762 pipeline.videoSegmentStream = new _VideoSegmentStream(videoTrack, options);
14763 pipeline.videoSegmentStream.on('timelineStartInfo', function (timelineStartInfo) {
14764 // When video emits timelineStartInfo data after a flush, we forward that
14765 // info to the AudioSegmentStream, if it exists, because video timeline
14766 // data takes precedence. Do not do this if keepOriginalTimestamps is set,
14767 // because this is a particularly subtle form of timestamp alteration.
14768 if (audioTrack && !options.keepOriginalTimestamps) {
14769 audioTrack.timelineStartInfo = timelineStartInfo; // On the first segment we trim AAC frames that exist before the
14770 // very earliest DTS we have seen in video because Chrome will
14771 // interpret any video track with a baseMediaDecodeTime that is
14772 // non-zero as a gap.
14773
14774 pipeline.audioSegmentStream.setEarliestDts(timelineStartInfo.dts - self.baseMediaDecodeTime);
14775 }
14776 });
14777 pipeline.videoSegmentStream.on('processedGopsInfo', self.trigger.bind(self, 'gopInfo'));
14778 pipeline.videoSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'videoSegmentTimingInfo'));
14779 pipeline.videoSegmentStream.on('baseMediaDecodeTime', function (baseMediaDecodeTime) {
14780 if (audioTrack) {
14781 pipeline.audioSegmentStream.setVideoBaseMediaDecodeTime(baseMediaDecodeTime);
14782 }
14783 });
14784 pipeline.videoSegmentStream.on('timingInfo', self.trigger.bind(self, 'videoTimingInfo')); // Set up the final part of the video pipeline
14785
14786 pipeline.h264Stream.pipe(pipeline.videoSegmentStream).pipe(pipeline.coalesceStream);
14787 }
14788
14789 if (audioTrack && !pipeline.audioSegmentStream) {
14790 // hook up the audio segment stream to the first track with aac data
14791 pipeline.coalesceStream.numberOfTracks++;
14792 pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
14793 pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo'));
14794 pipeline.audioSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'audioSegmentTimingInfo')); // Set up the final part of the audio pipeline
14795
14796 pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream);
14797 } // emit pmt info
14798
14799
14800 self.trigger('trackinfo', {
14801 hasAudio: !!audioTrack,
14802 hasVideo: !!videoTrack
14803 });
14804 }
14805 }); // Re-emit any data coming from the coalesce stream to the outside world
14806
14807 pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
14808 pipeline.coalesceStream.on('id3Frame', function (id3Frame) {
14809 id3Frame.dispatchType = pipeline.metadataStream.dispatchType;
14810 self.trigger('id3Frame', id3Frame);
14811 });
14812 pipeline.coalesceStream.on('caption', this.trigger.bind(this, 'caption')); // Let the consumer know we have finished flushing the entire pipeline
14813
14814 pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
14815 }; // hook up the segment streams once track metadata is delivered
14816
14817
14818 this.setBaseMediaDecodeTime = function (baseMediaDecodeTime) {
14819 var pipeline = this.transmuxPipeline_;
14820
14821 if (!options.keepOriginalTimestamps) {
14822 this.baseMediaDecodeTime = baseMediaDecodeTime;
14823 }
14824
14825 if (audioTrack) {
14826 audioTrack.timelineStartInfo.dts = undefined;
14827 audioTrack.timelineStartInfo.pts = undefined;
14828 trackDecodeInfo.clearDtsInfo(audioTrack);
14829
14830 if (pipeline.audioTimestampRolloverStream) {
14831 pipeline.audioTimestampRolloverStream.discontinuity();
14832 }
14833 }
14834
14835 if (videoTrack) {
14836 if (pipeline.videoSegmentStream) {
14837 pipeline.videoSegmentStream.gopCache_ = [];
14838 }
14839
14840 videoTrack.timelineStartInfo.dts = undefined;
14841 videoTrack.timelineStartInfo.pts = undefined;
14842 trackDecodeInfo.clearDtsInfo(videoTrack);
14843 pipeline.captionStream.reset();
14844 }
14845
14846 if (pipeline.timestampRolloverStream) {
14847 pipeline.timestampRolloverStream.discontinuity();
14848 }
14849 };
14850
14851 this.setAudioAppendStart = function (timestamp) {
14852 if (audioTrack) {
14853 this.transmuxPipeline_.audioSegmentStream.setAudioAppendStart(timestamp);
14854 }
14855 };
14856
14857 this.setRemux = function (val) {
14858 var pipeline = this.transmuxPipeline_;
14859 options.remux = val;
14860
14861 if (pipeline && pipeline.coalesceStream) {
14862 pipeline.coalesceStream.setRemux(val);
14863 }
14864 };
14865
14866 this.alignGopsWith = function (gopsToAlignWith) {
14867 if (videoTrack && this.transmuxPipeline_.videoSegmentStream) {
14868 this.transmuxPipeline_.videoSegmentStream.alignGopsWith(gopsToAlignWith);
14869 }
14870 }; // feed incoming data to the front of the parsing pipeline
14871
14872
14873 this.push = function (data) {
14874 if (hasFlushed) {
14875 var isAac = isLikelyAacData$1(data);
14876
14877 if (isAac && this.transmuxPipeline_.type !== 'aac') {
14878 this.setupAacPipeline();
14879 } else if (!isAac && this.transmuxPipeline_.type !== 'ts') {
14880 this.setupTsPipeline();
14881 }
14882
14883 hasFlushed = false;
14884 }
14885
14886 this.transmuxPipeline_.headOfPipeline.push(data);
14887 }; // flush any buffered data
14888
14889
14890 this.flush = function () {
14891 hasFlushed = true; // Start at the top of the pipeline and flush all pending work
14892
14893 this.transmuxPipeline_.headOfPipeline.flush();
14894 };
14895
14896 this.endTimeline = function () {
14897 this.transmuxPipeline_.headOfPipeline.endTimeline();
14898 };
14899
14900 this.reset = function () {
14901 if (this.transmuxPipeline_.headOfPipeline) {
14902 this.transmuxPipeline_.headOfPipeline.reset();
14903 }
14904 }; // Caption data has to be reset when seeking outside buffered range
14905
14906
14907 this.resetCaptions = function () {
14908 if (this.transmuxPipeline_.captionStream) {
14909 this.transmuxPipeline_.captionStream.reset();
14910 }
14911 };
14912 };
14913
14914 _Transmuxer.prototype = new stream();
14915 var transmuxer$1 = {
14916 Transmuxer: _Transmuxer,
14917 VideoSegmentStream: _VideoSegmentStream,
14918 AudioSegmentStream: _AudioSegmentStream,
14919 AUDIO_PROPERTIES: audioProperties,
14920 VIDEO_PROPERTIES: videoProperties,
14921 // exported for testing
14922 generateSegmentTimingInfo: generateSegmentTimingInfo
14923 };
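// Illustrative usage sketch (not part of the original bundle): driving the
// full-segment Transmuxer defined above. `tsBytes` is a hypothetical
// Uint8Array of MPEG2-TS data; each 'data' event delivers one fMP4 segment.
var exampleFullMuxer = new transmuxer$1.Transmuxer({ keepOriginalTimestamps: false });
exampleFullMuxer.on('data', function (segment) {
  // segment.initSegment holds the moov; segment.data holds the moof+mdat pairs
  var fmp4 = new Uint8Array(segment.initSegment.byteLength + segment.data.byteLength);
  fmp4.set(segment.initSegment);
  fmp4.set(segment.data, segment.initSegment.byteLength); // fmp4 is now appendable to an MSE SourceBuffer
});
// exampleFullMuxer.push(tsBytes);
// exampleFullMuxer.flush();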
14924 /**
14925 * mux.js
14926 *
14927 * Copyright (c) Brightcove
14928 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
14929 */
14930
14931 var codecs = {
14932 Adts: adts,
14933 h264: h264
14934 };
14935 var ONE_SECOND_IN_TS = clock.ONE_SECOND_IN_TS;
14936 /**
14937 * Constructs a single-track, ISO BMFF media segment from AAC data
14938 * events. The output of this stream can be fed to a SourceBuffer
14939 * configured with a suitable initialization segment.
14940 */
14941
14942 var AudioSegmentStream = function AudioSegmentStream(track, options) {
14943 var adtsFrames = [],
14944 sequenceNumber = 0,
14945 earliestAllowedDts = 0,
14946 audioAppendStartTs = 0,
14947 videoBaseMediaDecodeTime = Infinity,
14948 segmentStartPts = null,
14949 segmentEndPts = null;
14950 options = options || {};
14951 AudioSegmentStream.prototype.init.call(this);
14952
14953 this.push = function (data) {
14954 trackDecodeInfo.collectDtsInfo(track, data);
14955
14956 if (track) {
14957 audioProperties.forEach(function (prop) {
14958 track[prop] = data[prop];
14959 });
14960 } // buffer audio data until flush() is called
14961
14962
14963 adtsFrames.push(data);
14964 };
14965
14966 this.setEarliestDts = function (earliestDts) {
14967 earliestAllowedDts = earliestDts;
14968 };
14969
14970 this.setVideoBaseMediaDecodeTime = function (baseMediaDecodeTime) {
14971 videoBaseMediaDecodeTime = baseMediaDecodeTime;
14972 };
14973
14974 this.setAudioAppendStart = function (timestamp) {
14975 audioAppendStartTs = timestamp;
14976 };
14977
14978 this.processFrames_ = function () {
14979 var frames, moof, mdat, boxes, timingInfo; // return early if no audio data has been observed
14980
14981 if (adtsFrames.length === 0) {
14982 return;
14983 }
14984
14985 frames = audioFrameUtils.trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts);
14986
14987 if (frames.length === 0) {
14988 // return early if the frames are all after the earliest allowed DTS
14989 // TODO should we clear the adtsFrames?
14990 return;
14991 }
14992
14993 track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps);
14994 audioFrameUtils.prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime); // we have to build the index from byte locations to
14995 // samples (that is, adts frames) in the audio data
14996
14997 track.samples = audioFrameUtils.generateSampleTable(frames); // concatenate the audio data to construct the mdat
14998
14999 mdat = mp4Generator.mdat(audioFrameUtils.concatenateFrameData(frames));
15000 adtsFrames = [];
15001 moof = mp4Generator.moof(sequenceNumber, [track]); // bump the sequence number for next time
15002
15003 sequenceNumber++;
15004 track.initSegment = mp4Generator.initSegment([track]); // it would be great to allocate this array up front instead of
15005 // throwing away hundreds of media segment fragments
15006
15007 boxes = new Uint8Array(moof.byteLength + mdat.byteLength);
15008 boxes.set(moof);
15009 boxes.set(mdat, moof.byteLength);
15010 trackDecodeInfo.clearDtsInfo(track);
15011
15012 if (segmentStartPts === null) {
15013 segmentEndPts = segmentStartPts = frames[0].pts;
15014 }
15015
15016 segmentEndPts += frames.length * (ONE_SECOND_IN_TS * 1024 / track.samplerate);
15017 timingInfo = {
15018 start: segmentStartPts
15019 };
15020 this.trigger('timingInfo', timingInfo);
15021 this.trigger('data', {
15022 track: track,
15023 boxes: boxes
15024 });
15025 };
15026
15027 this.flush = function () {
15028 this.processFrames_(); // trigger final timing info
15029
15030 this.trigger('timingInfo', {
15031 start: segmentStartPts,
15032 end: segmentEndPts
15033 });
15034 this.resetTiming_();
15035 this.trigger('done', 'AudioSegmentStream');
15036 };
15037
15038 this.partialFlush = function () {
15039 this.processFrames_();
15040 this.trigger('partialdone', 'AudioSegmentStream');
15041 };
15042
15043 this.endTimeline = function () {
15044 this.flush();
15045 this.trigger('endedtimeline', 'AudioSegmentStream');
15046 };
15047
15048 this.resetTiming_ = function () {
15049 trackDecodeInfo.clearDtsInfo(track);
15050 segmentStartPts = null;
15051 segmentEndPts = null;
15052 };
15053
15054 this.reset = function () {
15055 this.resetTiming_();
15056 adtsFrames = [];
15057 this.trigger('reset');
15058 };
15059 };
15060
15061 AudioSegmentStream.prototype = new stream();
15062 var audioSegmentStream = AudioSegmentStream;
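// Illustrative sketch (not part of the original bundle): every AAC frame
// carries 1024 samples, so processFrames_ above advances segmentEndPts by
// ONE_SECOND_IN_TS * 1024 / samplerate clock ticks per frame. At 48kHz, for
// example, each frame spans 1920 ticks (roughly 21.3ms):
var exampleAacFrameDurationTs = ONE_SECOND_IN_TS * 1024 / 48000; // === 1920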
15063
15064 var VideoSegmentStream = function VideoSegmentStream(track, options) {
15065 var sequenceNumber = 0,
15066 nalUnits = [],
15067 frameCache = [],
15068 // gopsToAlignWith = [],
15069 config,
15070 pps,
15071 segmentStartPts = null,
15072 segmentEndPts = null,
15073 gops,
15074 ensureNextFrameIsKeyFrame = true;
15075 options = options || {};
15076 VideoSegmentStream.prototype.init.call(this);
15077
15078 this.push = function (nalUnit) {
15079 trackDecodeInfo.collectDtsInfo(track, nalUnit);
15080
15081 if (typeof track.timelineStartInfo.dts === 'undefined') {
15082 track.timelineStartInfo.dts = nalUnit.dts;
15083 } // record the track config
15084
15085
15086 if (nalUnit.nalUnitType === 'seq_parameter_set_rbsp' && !config) {
15087 config = nalUnit.config;
15088 track.sps = [nalUnit.data];
15089 videoProperties.forEach(function (prop) {
15090 track[prop] = config[prop];
15091 }, this);
15092 }
15093
15094 if (nalUnit.nalUnitType === 'pic_parameter_set_rbsp' && !pps) {
15095 pps = nalUnit.data;
15096 track.pps = [nalUnit.data];
15097 } // buffer video until flush() is called
15098
15099
15100 nalUnits.push(nalUnit);
15101 };
15102
15103 this.processNals_ = function (cacheLastFrame) {
15104 var i;
15105 nalUnits = frameCache.concat(nalUnits); // Throw away nalUnits at the start of the byte stream until
15106 // we find the first AUD
15107
15108 while (nalUnits.length) {
15109 if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
15110 break;
15111 }
15112
15113 nalUnits.shift();
15114 } // Return early if no video data has been observed
15115
15116
15117 if (nalUnits.length === 0) {
15118 return;
15119 }
15120
15121 var frames = frameUtils.groupNalsIntoFrames(nalUnits);
15122
15123 if (!frames.length) {
15124 return;
15125 } // note that the frame cache may also protect us from cases where we haven't
15126 // pushed data for the entire first or last frame yet
15127
15128
15129 frameCache = frames[frames.length - 1];
15130
15131 if (cacheLastFrame) {
15132 frames.pop();
15133 frames.duration -= frameCache.duration;
15134 frames.nalCount -= frameCache.length;
15135 frames.byteLength -= frameCache.byteLength;
15136 }
15137
15138 if (!frames.length) {
15139 nalUnits = [];
15140 return;
15141 }
15142
15143 this.trigger('timelineStartInfo', track.timelineStartInfo);
15144
15145 if (ensureNextFrameIsKeyFrame) {
15146 gops = frameUtils.groupFramesIntoGops(frames);
15147
15148 if (!gops[0][0].keyFrame) {
15149 gops = frameUtils.extendFirstKeyFrame(gops);
15150
15151 if (!gops[0][0].keyFrame) {
15152 // we haven't yet gotten a key frame, so reset nal units to wait for more nal
15153 // units
15154 nalUnits = [].concat.apply([], frames).concat(frameCache);
15155 frameCache = [];
15156 return;
15157 }
15158
15159 frames = [].concat.apply([], gops);
15160 frames.duration = gops.duration;
15161 }
15162
15163 ensureNextFrameIsKeyFrame = false;
15164 }
15165
15166 if (segmentStartPts === null) {
15167 segmentStartPts = frames[0].pts;
15168 segmentEndPts = segmentStartPts;
15169 }
15170
15171 segmentEndPts += frames.duration;
15172 this.trigger('timingInfo', {
15173 start: segmentStartPts,
15174 end: segmentEndPts
15175 });
15176
15177 for (i = 0; i < frames.length; i++) {
15178 var frame = frames[i];
15179 track.samples = frameUtils.generateSampleTableForFrame(frame);
15180 var mdat = mp4Generator.mdat(frameUtils.concatenateNalDataForFrame(frame));
15181 trackDecodeInfo.clearDtsInfo(track);
15182 trackDecodeInfo.collectDtsInfo(track, frame);
15183 track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps);
15184 var moof = mp4Generator.moof(sequenceNumber, [track]);
15185 sequenceNumber++;
15186 track.initSegment = mp4Generator.initSegment([track]);
15187 var boxes = new Uint8Array(moof.byteLength + mdat.byteLength);
15188 boxes.set(moof);
15189 boxes.set(mdat, moof.byteLength);
15190 this.trigger('data', {
15191 track: track,
15192 boxes: boxes,
15193 sequence: sequenceNumber,
15194 videoFrameDts: frame.dts,
15195 videoFramePts: frame.pts
15196 });
15197 }
15198
15199 nalUnits = [];
15200 };
15201
15202 this.resetTimingAndConfig_ = function () {
15203 config = undefined;
15204 pps = undefined;
15205 segmentStartPts = null;
15206 segmentEndPts = null;
15207 };
15208
15209 this.partialFlush = function () {
15210 this.processNals_(true);
15211 this.trigger('partialdone', 'VideoSegmentStream');
15212 };
15213
15214 this.flush = function () {
15215 this.processNals_(false); // reset config and pps because they may differ across segments
15216 // for instance, when we are rendition switching
15217
15218 this.resetTimingAndConfig_();
15219 this.trigger('done', 'VideoSegmentStream');
15220 };
15221
15222 this.endTimeline = function () {
15223 this.flush();
15224 this.trigger('endedtimeline', 'VideoSegmentStream');
15225 };
15226
15227 this.reset = function () {
15228 this.resetTimingAndConfig_();
15229 frameCache = [];
15230 nalUnits = [];
15231 ensureNextFrameIsKeyFrame = true;
15232 this.trigger('reset');
15233 };
15234 };
15235
15236 VideoSegmentStream.prototype = new stream();
15237 var videoSegmentStream = VideoSegmentStream;
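// Illustrative usage sketch (not part of the original bundle): unlike the
// full-segment stream earlier in this file, this VideoSegmentStream emits one
// [moof][mdat] fragment per frame, so consumers can append video with frame
// granularity. A minimal listener, assuming a bare track object:
var exampleFrameStream = new videoSegmentStream({ timelineStartInfo: {} }, {});
exampleFrameStream.on('data', function (event) {
  // event.boxes is a single-frame fragment; event.videoFramePts and
  // event.videoFrameDts identify the frame it contains
});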
15238 var isLikelyAacData = utils.isLikelyAacData;
15239
15240 var createPipeline = function createPipeline(object) {
15241 object.prototype = new stream();
15242 object.prototype.init.call(object);
15243 return object;
15244 };
15245
15246 var tsPipeline = function tsPipeline(options) {
15247 var pipeline = {
15248 type: 'ts',
15249 tracks: {
15250 audio: null,
15251 video: null
15252 },
15253 packet: new m2ts_1.TransportPacketStream(),
15254 parse: new m2ts_1.TransportParseStream(),
15255 elementary: new m2ts_1.ElementaryStream(),
15256 timestampRollover: new m2ts_1.TimestampRolloverStream(),
15257 adts: new codecs.Adts(),
15258 h264: new codecs.h264.H264Stream(),
15259 captionStream: new m2ts_1.CaptionStream(options),
15260 metadataStream: new m2ts_1.MetadataStream()
15261 };
15262 pipeline.headOfPipeline = pipeline.packet; // Transport Stream
15263
15264 pipeline.packet.pipe(pipeline.parse).pipe(pipeline.elementary).pipe(pipeline.timestampRollover); // H264
15265
15266 pipeline.timestampRollover.pipe(pipeline.h264); // Hook up CEA-608/708 caption stream
15267
15268 pipeline.h264.pipe(pipeline.captionStream);
15269 pipeline.timestampRollover.pipe(pipeline.metadataStream); // ADTS
15270
15271 pipeline.timestampRollover.pipe(pipeline.adts);
15272 pipeline.elementary.on('data', function (data) {
15273 if (data.type !== 'metadata') {
15274 return;
15275 }
15276
15277 for (var i = 0; i < data.tracks.length; i++) {
15278 if (!pipeline.tracks[data.tracks[i].type]) {
15279 pipeline.tracks[data.tracks[i].type] = data.tracks[i];
15280 pipeline.tracks[data.tracks[i].type].timelineStartInfo.baseMediaDecodeTime = options.baseMediaDecodeTime;
15281 }
15282 }
15283
15284 if (pipeline.tracks.video && !pipeline.videoSegmentStream) {
15285 pipeline.videoSegmentStream = new videoSegmentStream(pipeline.tracks.video, options);
15286 pipeline.videoSegmentStream.on('timelineStartInfo', function (timelineStartInfo) {
15287 if (pipeline.tracks.audio && !options.keepOriginalTimestamps) {
15288 pipeline.audioSegmentStream.setEarliestDts(timelineStartInfo.dts - options.baseMediaDecodeTime);
15289 }
15290 });
15291 pipeline.videoSegmentStream.on('timingInfo', pipeline.trigger.bind(pipeline, 'videoTimingInfo'));
15292 pipeline.videoSegmentStream.on('data', function (data) {
15293 pipeline.trigger('data', {
15294 type: 'video',
15295 data: data
15296 });
15297 });
15298 pipeline.videoSegmentStream.on('done', pipeline.trigger.bind(pipeline, 'done'));
15299 pipeline.videoSegmentStream.on('partialdone', pipeline.trigger.bind(pipeline, 'partialdone'));
15300 pipeline.videoSegmentStream.on('endedtimeline', pipeline.trigger.bind(pipeline, 'endedtimeline'));
15301 pipeline.h264.pipe(pipeline.videoSegmentStream);
15302 }
15303
15304 if (pipeline.tracks.audio && !pipeline.audioSegmentStream) {
15305 pipeline.audioSegmentStream = new audioSegmentStream(pipeline.tracks.audio, options);
15306 pipeline.audioSegmentStream.on('data', function (data) {
15307 pipeline.trigger('data', {
15308 type: 'audio',
15309 data: data
15310 });
15311 });
15312 pipeline.audioSegmentStream.on('done', pipeline.trigger.bind(pipeline, 'done'));
15313 pipeline.audioSegmentStream.on('partialdone', pipeline.trigger.bind(pipeline, 'partialdone'));
15314 pipeline.audioSegmentStream.on('endedtimeline', pipeline.trigger.bind(pipeline, 'endedtimeline'));
15315 pipeline.audioSegmentStream.on('timingInfo', pipeline.trigger.bind(pipeline, 'audioTimingInfo'));
15316 pipeline.adts.pipe(pipeline.audioSegmentStream);
15317 } // emit pmt info
15318
15319
15320 pipeline.trigger('trackinfo', {
15321 hasAudio: !!pipeline.tracks.audio,
15322 hasVideo: !!pipeline.tracks.video
15323 });
15324 });
15325 pipeline.captionStream.on('data', function (caption) {
15326 var timelineStartPts;
15327
15328 if (pipeline.tracks.video) {
15329 timelineStartPts = pipeline.tracks.video.timelineStartInfo.pts || 0;
15330 } else {
15331 // This will only happen if we encounter caption packets before
15332 // video data in a segment. This is an unusual/unlikely scenario,
15333 // so we assume the timeline starts at zero for now.
15334 timelineStartPts = 0;
15335 } // Translate caption PTS times into second offsets into the
15336 // video timeline for the segment
15337
15338
15339 caption.startTime = clock.metadataTsToSeconds(caption.startPts, timelineStartPts, options.keepOriginalTimestamps);
15340 caption.endTime = clock.metadataTsToSeconds(caption.endPts, timelineStartPts, options.keepOriginalTimestamps);
15341 pipeline.trigger('caption', caption);
15342 });
15343 pipeline = createPipeline(pipeline);
15344 pipeline.metadataStream.on('data', pipeline.trigger.bind(pipeline, 'id3Frame'));
15345 return pipeline;
15346 };
15347
15348 var aacPipeline = function aacPipeline(options) {
15349 var pipeline = {
15350 type: 'aac',
15351 tracks: {
15352 audio: null
15353 },
15354 metadataStream: new m2ts_1.MetadataStream(),
15355 aacStream: new aac(),
15356 audioRollover: new m2ts_1.TimestampRolloverStream('audio'),
15357 timedMetadataRollover: new m2ts_1.TimestampRolloverStream('timed-metadata'),
15358 adtsStream: new adts(true)
15359 }; // set up the parsing pipeline
15360
15361 pipeline.headOfPipeline = pipeline.aacStream;
15362 pipeline.aacStream.pipe(pipeline.audioRollover).pipe(pipeline.adtsStream);
15363 pipeline.aacStream.pipe(pipeline.timedMetadataRollover).pipe(pipeline.metadataStream);
15364 pipeline.metadataStream.on('timestamp', function (frame) {
15365 pipeline.aacStream.setTimestamp(frame.timeStamp);
15366 });
15367 pipeline.aacStream.on('data', function (data) {
15368 if (data.type !== 'timed-metadata' && data.type !== 'audio' || pipeline.audioSegmentStream) {
15369 return;
15370 }
15371
15372 pipeline.tracks.audio = pipeline.tracks.audio || {
15373 timelineStartInfo: {
15374 baseMediaDecodeTime: options.baseMediaDecodeTime
15375 },
15376 codec: 'adts',
15377 type: 'audio'
15378 }; // hook up the audio segment stream to the first track with aac data
15379
15380 pipeline.audioSegmentStream = new audioSegmentStream(pipeline.tracks.audio, options);
15381 pipeline.audioSegmentStream.on('data', function (data) {
15382 pipeline.trigger('data', {
15383 type: 'audio',
15384 data: data
15385 });
15386 });
15387 pipeline.audioSegmentStream.on('partialdone', pipeline.trigger.bind(pipeline, 'partialdone'));
15388 pipeline.audioSegmentStream.on('done', pipeline.trigger.bind(pipeline, 'done'));
15389 pipeline.audioSegmentStream.on('endedtimeline', pipeline.trigger.bind(pipeline, 'endedtimeline'));
15390 pipeline.audioSegmentStream.on('timingInfo', pipeline.trigger.bind(pipeline, 'audioTimingInfo')); // Set up the final part of the audio pipeline
15391
15392 pipeline.adtsStream.pipe(pipeline.audioSegmentStream);
15393 pipeline.trigger('trackinfo', {
15394 hasAudio: !!pipeline.tracks.audio,
15395 hasVideo: !!pipeline.tracks.video
15396 });
15397 }); // set the pipeline up as a stream before binding to get access to the trigger function
15398
15399 pipeline = createPipeline(pipeline);
15400 pipeline.metadataStream.on('data', pipeline.trigger.bind(pipeline, 'id3Frame'));
15401 return pipeline;
15402 };
15403
15404 var setupPipelineListeners = function setupPipelineListeners(pipeline, transmuxer) {
15405 pipeline.on('data', transmuxer.trigger.bind(transmuxer, 'data'));
15406 pipeline.on('done', transmuxer.trigger.bind(transmuxer, 'done'));
15407 pipeline.on('partialdone', transmuxer.trigger.bind(transmuxer, 'partialdone'));
15408 pipeline.on('endedtimeline', transmuxer.trigger.bind(transmuxer, 'endedtimeline'));
15409 pipeline.on('audioTimingInfo', transmuxer.trigger.bind(transmuxer, 'audioTimingInfo'));
15410 pipeline.on('videoTimingInfo', transmuxer.trigger.bind(transmuxer, 'videoTimingInfo'));
15411 pipeline.on('trackinfo', transmuxer.trigger.bind(transmuxer, 'trackinfo'));
15412 pipeline.on('id3Frame', function (event) {
15413 // add this to every single emitted segment even though it's only needed for the first
15414 event.dispatchType = pipeline.metadataStream.dispatchType; // keep original time, can be adjusted if needed at a higher level
15415
15416 event.cueTime = clock.videoTsToSeconds(event.pts);
15417 transmuxer.trigger('id3Frame', event);
15418 });
15419 pipeline.on('caption', function (event) {
15420 transmuxer.trigger('caption', event);
15421 });
15422 };
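// Illustrative sketch (not part of the original bundle): the forwarding above
// relies on Function.prototype.bind to pre-fill the event name, so
// `transmuxer.trigger.bind(transmuxer, 'done')` behaves like:
var exampleForward = function (target, eventName) {
  return function (payload) {
    target.trigger(eventName, payload);
  };
};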
15423
15424 var Transmuxer = function Transmuxer(options) {
15425 var pipeline = null,
15426 hasFlushed = true;
15427 options = options || {};
15428 Transmuxer.prototype.init.call(this);
15429 options.baseMediaDecodeTime = options.baseMediaDecodeTime || 0;
15430
15431 this.push = function (bytes) {
15432 if (hasFlushed) {
15433 var isAac = isLikelyAacData(bytes);
15434
15435 if (isAac && (!pipeline || pipeline.type !== 'aac')) {
15436 pipeline = aacPipeline(options);
15437 setupPipelineListeners(pipeline, this);
15438 } else if (!isAac && (!pipeline || pipeline.type !== 'ts')) {
15439 pipeline = tsPipeline(options);
15440 setupPipelineListeners(pipeline, this);
15441 }
15442
15443 hasFlushed = false;
15444 }
15445
15446 pipeline.headOfPipeline.push(bytes);
15447 };
15448
15449 this.flush = function () {
15450 if (!pipeline) {
15451 return;
15452 }
15453
15454 hasFlushed = true;
15455 pipeline.headOfPipeline.flush();
15456 };
15457
15458 this.partialFlush = function () {
15459 if (!pipeline) {
15460 return;
15461 }
15462
15463 pipeline.headOfPipeline.partialFlush();
15464 };
15465
15466 this.endTimeline = function () {
15467 if (!pipeline) {
15468 return;
15469 }
15470
15471 pipeline.headOfPipeline.endTimeline();
15472 };
15473
15474 this.reset = function () {
15475 if (!pipeline) {
15476 return;
15477 }
15478
15479 pipeline.headOfPipeline.reset();
15480 };
15481
15482 this.setBaseMediaDecodeTime = function (baseMediaDecodeTime) {
15483 if (!options.keepOriginalTimestamps) {
15484 options.baseMediaDecodeTime = baseMediaDecodeTime;
15485 }
15486
15487 if (!pipeline) {
15488 return;
15489 }
15490
15491 if (pipeline.tracks.audio) {
15492 pipeline.tracks.audio.timelineStartInfo.dts = undefined;
15493 pipeline.tracks.audio.timelineStartInfo.pts = undefined;
15494 trackDecodeInfo.clearDtsInfo(pipeline.tracks.audio);
15495
15496 if (pipeline.audioRollover) {
15497 pipeline.audioRollover.discontinuity();
15498 }
15499 }
15500
15501 if (pipeline.tracks.video) {
15502 if (pipeline.videoSegmentStream) {
15503 pipeline.videoSegmentStream.gopCache_ = [];
15504 }
15505
15506 pipeline.tracks.video.timelineStartInfo.dts = undefined;
15507 pipeline.tracks.video.timelineStartInfo.pts = undefined;
15508 trackDecodeInfo.clearDtsInfo(pipeline.tracks.video); // pipeline.captionStream.reset();
15509 }
15510
15511 if (pipeline.timestampRollover) {
15512 pipeline.timestampRollover.discontinuity();
15513 }
15514 };
15515
15516 this.setRemux = function (val) {
15517 options.remux = val;
15518
15519 if (pipeline && pipeline.coalesceStream) {
15520 pipeline.coalesceStream.setRemux(val);
15521 }
15522 };
15523
15524 this.setAudioAppendStart = function (audioAppendStart) {
15525 if (!pipeline || !pipeline.tracks.audio || !pipeline.audioSegmentStream) {
15526 return;
15527 }
15528
15529 pipeline.audioSegmentStream.setAudioAppendStart(audioAppendStart);
15530 }; // TODO GOP alignment support
15531 // Support may be a bit trickier than with full segment appends, as GOPs may be split
15532 // and processed in a more granular fashion
15533
15534
15535 this.alignGopsWith = function (gopsToAlignWith) {
15536 return;
15537 };
15538 };
15539
15540 Transmuxer.prototype = new stream();
15541 var transmuxer = Transmuxer;
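// Illustrative usage sketch (not part of the original bundle): this partial
// transmuxer accepts incremental appends. `chunk` is a hypothetical
// Uint8Array of TS bytes received so far; partialFlush() emits whatever
// complete frames are available without ending the segment.
var examplePartialMuxer = new Transmuxer({ baseMediaDecodeTime: 0 });
examplePartialMuxer.on('data', function (event) {
  // event.type is 'audio' or 'video'; event.data.boxes holds the fragment
});
// examplePartialMuxer.push(chunk);
// examplePartialMuxer.partialFlush();
// examplePartialMuxer.flush(); // once the full segment has been pushed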
15542 /**
15543 * mux.js
15544 *
15545 * Copyright (c) Brightcove
15546 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
15547 */
15548
15549 var toUnsigned$2 = function toUnsigned(value) {
15550 return value >>> 0;
15551 };
15552
15553 var toHexString = function toHexString(value) {
15554 return ('00' + value.toString(16)).slice(-2);
15555 };
15556
15557 var bin = {
15558 toUnsigned: toUnsigned$2,
15559 toHexString: toHexString
15560 };
15561
15562 var parseType = function parseType(buffer) {
15563 var result = '';
15564 result += String.fromCharCode(buffer[0]);
15565 result += String.fromCharCode(buffer[1]);
15566 result += String.fromCharCode(buffer[2]);
15567 result += String.fromCharCode(buffer[3]);
15568 return result;
15569 };
15570
15571 var parseType_1 = parseType;
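// Illustrative sketch (not part of the original bundle): parseType reads the
// four-character box type that follows a box's 32-bit size field:
var exampleBoxType = parseType_1(new Uint8Array([0x6d, 0x6f, 0x6f, 0x66])); // === 'moof'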
15572 var toUnsigned$1 = bin.toUnsigned;
15573
15574 var findBox = function findBox(data, path) {
15575 var results = [],
15576 i,
15577 size,
15578 type,
15579 end,
15580 subresults;
15581
15582 if (!path.length) {
15583 // short-circuit the search for empty paths
15584 return null;
15585 }
15586
15587 for (i = 0; i < data.byteLength;) {
15588 size = toUnsigned$1(data[i] << 24 | data[i + 1] << 16 | data[i + 2] << 8 | data[i + 3]);
15589 type = parseType_1(data.subarray(i + 4, i + 8));
15590 end = size > 1 ? i + size : data.byteLength;
15591
15592 if (type === path[0]) {
15593 if (path.length === 1) {
15594 // this is the end of the path and we've found the box we were
15595 // looking for
15596 results.push(data.subarray(i + 8, end));
15597 } else {
15598 // recursively search for the next box along the path
15599 subresults = findBox(data.subarray(i + 8, end), path.slice(1));
15600
15601 if (subresults.length) {
15602 results = results.concat(subresults);
15603 }
15604 }
15605 }
15606
15607 i = end;
15608 } // we've finished searching all of data
15609
15610
15611 return results;
15612 };
15613
15614 var findBox_1 = findBox;
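  // Illustrative usage (a sketch, not part of the library source): given a
  // hypothetical Uint8Array `segmentBytes` holding an fMP4 segment, findBox
  // walks a nested box path and returns the payload of every match, where
  // each payload is a subarray beginning just past the 8-byte box header:
  //
  //   var trafs = findBox_1(segmentBytes, ['moof', 'traf']); // all traf payloads
  //   var mdats = findBox_1(segmentBytes, ['mdat']);         // all mdat payloads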
15615 var toUnsigned = bin.toUnsigned;
15616
15617 var tfdt = function tfdt(data) {
15618 var result = {
15619 version: data[0],
15620 flags: new Uint8Array(data.subarray(1, 4)),
15621 baseMediaDecodeTime: toUnsigned(data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7])
15622 };
15623
15624 if (result.version === 1) {
15625 result.baseMediaDecodeTime *= Math.pow(2, 32);
15626 result.baseMediaDecodeTime += toUnsigned(data[8] << 24 | data[9] << 16 | data[10] << 8 | data[11]);
15627 }
15628
15629 return result;
15630 };
15631
15632 var parseTfdt = tfdt;
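  // Worked example (illustrative): a version-0 tfdt payload stores
  // baseMediaDecodeTime in 32 bits; version 1 widens it to 64 bits, which is
  // why the branch above multiplies the high word by 2^32. For one second on
  // a 90kHz track (90000 = 0x00015F90):
  //
  //   parseTfdt(new Uint8Array([
  //     0x00,                   // version 0
  //     0x00, 0x00, 0x00,       // flags
  //     0x00, 0x01, 0x5F, 0x90  // baseMediaDecodeTime
  //   ])).baseMediaDecodeTime;  // => 90000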
15633
15634 var parseSampleFlags = function parseSampleFlags(flags) {
15635 return {
15636 isLeading: (flags[0] & 0x0c) >>> 2,
15637 dependsOn: flags[0] & 0x03,
15638 isDependedOn: (flags[1] & 0xc0) >>> 6,
15639 hasRedundancy: (flags[1] & 0x30) >>> 4,
15640 paddingValue: (flags[1] & 0x0e) >>> 1,
15641 isNonSyncSample: flags[1] & 0x01,
15642 degradationPriority: flags[2] << 8 | flags[3]
15643 };
15644 };
15645
15646 var parseSampleFlags_1 = parseSampleFlags;
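  // Worked example (illustrative): the four flag bytes pack several small bit
  // fields, extracted by the masks and shifts above:
  //
  //   parseSampleFlags_1(new Uint8Array([0x02, 0x01, 0x00, 0x00]));
  //   // => { isLeading: 0, dependsOn: 2, isDependedOn: 0, hasRedundancy: 0,
  //   //      paddingValue: 0, isNonSyncSample: 1, degradationPriority: 0 }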
15647
15648 var trun = function trun(data) {
15649 var result = {
15650 version: data[0],
15651 flags: new Uint8Array(data.subarray(1, 4)),
15652 samples: []
15653 },
15654 view = new DataView(data.buffer, data.byteOffset, data.byteLength),
15655 // Flag interpretation: flags is a 24-bit field stored as three bytes
15656 dataOffsetPresent = result.flags[2] & 0x01,
15657 // flags[2] & 0x01 tests the 0x000001 (data-offset-present) bit
15658 firstSampleFlagsPresent = result.flags[2] & 0x04,
15659 // flags[2] & 0x04 tests the 0x000004 (first-sample-flags-present) bit
15660 sampleDurationPresent = result.flags[1] & 0x01,
15661 // flags[1] & 0x01 tests the 0x000100 (sample-duration-present) bit
15662 sampleSizePresent = result.flags[1] & 0x02,
15663 // flags[1] & 0x02 tests the 0x000200 (sample-size-present) bit
15664 sampleFlagsPresent = result.flags[1] & 0x04,
15665 // flags[1] & 0x04 tests the 0x000400 (sample-flags-present) bit
15666 sampleCompositionTimeOffsetPresent = result.flags[1] & 0x08,
15667 // flags[1] & 0x08 tests the 0x000800 (sample-composition-time-offsets-present) bit
15668 sampleCount = view.getUint32(4),
15669 offset = 8,
15670 sample;
15671
15672 if (dataOffsetPresent) {
15673 // 32 bit signed integer
15674 result.dataOffset = view.getInt32(offset);
15675 offset += 4;
15676 } // Overrides the flags for the first sample only. The order of
15677 // optional values will be: duration, size, compositionTimeOffset
15678
15679
15680 if (firstSampleFlagsPresent && sampleCount) {
15681 sample = {
15682 flags: parseSampleFlags_1(data.subarray(offset, offset + 4))
15683 };
15684 offset += 4;
15685
15686 if (sampleDurationPresent) {
15687 sample.duration = view.getUint32(offset);
15688 offset += 4;
15689 }
15690
15691 if (sampleSizePresent) {
15692 sample.size = view.getUint32(offset);
15693 offset += 4;
15694 }
15695
15696 if (sampleCompositionTimeOffsetPresent) {
15697 if (result.version === 1) {
15698 sample.compositionTimeOffset = view.getInt32(offset);
15699 } else {
15700 sample.compositionTimeOffset = view.getUint32(offset);
15701 }
15702
15703 offset += 4;
15704 }
15705
15706 result.samples.push(sample);
15707 sampleCount--;
15708 }
15709
15710 while (sampleCount--) {
15711 sample = {};
15712
15713 if (sampleDurationPresent) {
15714 sample.duration = view.getUint32(offset);
15715 offset += 4;
15716 }
15717
15718 if (sampleSizePresent) {
15719 sample.size = view.getUint32(offset);
15720 offset += 4;
15721 }
15722
15723 if (sampleFlagsPresent) {
15724 sample.flags = parseSampleFlags_1(data.subarray(offset, offset + 4));
15725 offset += 4;
15726 }
15727
15728 if (sampleCompositionTimeOffsetPresent) {
15729 if (result.version === 1) {
15730 sample.compositionTimeOffset = view.getInt32(offset);
15731 } else {
15732 sample.compositionTimeOffset = view.getUint32(offset);
15733 }
15734
15735 offset += 4;
15736 }
15737
15738 result.samples.push(sample);
15739 }
15740
15741 return result;
15742 };
15743
15744 var parseTrun = trun;
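  // Worked example (illustrative): trun flags 0x000301 declare data-offset,
  // sample-duration and sample-size fields, so the single sample record
  // carries a duration and a size:
  //
  //   parseTrun(new Uint8Array([
  //     0x00,                   // version 0
  //     0x00, 0x03, 0x01,       // flags
  //     0x00, 0x00, 0x00, 0x01, // sampleCount 1
  //     0x00, 0x00, 0x00, 0x10, // dataOffset 16
  //     0x00, 0x00, 0x0B, 0xB8, // duration 3000
  //     0x00, 0x00, 0x04, 0x00  // size 1024
  //   ]));
  //   // => { version: 0, dataOffset: 16,
  //   //      samples: [{ duration: 3000, size: 1024 }], ... }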
15745
15746 var tfhd = function tfhd(data) {
15747 var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
15748 result = {
15749 version: data[0],
15750 flags: new Uint8Array(data.subarray(1, 4)),
15751 trackId: view.getUint32(4)
15752 },
15753 baseDataOffsetPresent = result.flags[2] & 0x01,
15754 sampleDescriptionIndexPresent = result.flags[2] & 0x02,
15755 defaultSampleDurationPresent = result.flags[2] & 0x08,
15756 defaultSampleSizePresent = result.flags[2] & 0x10,
15757 defaultSampleFlagsPresent = result.flags[2] & 0x20,
15758 durationIsEmpty = result.flags[0] & 0x010000,
15759 defaultBaseIsMoof = result.flags[0] & 0x020000,
15760 i;
15761 i = 8;
15762
15763 if (baseDataOffsetPresent) {
15764 i += 4; // truncate top 4 bytes
15765 // FIXME: should we read the full 64 bits?
15766
15767 result.baseDataOffset = view.getUint32(12);
15768 i += 4;
15769 }
15770
15771 if (sampleDescriptionIndexPresent) {
15772 result.sampleDescriptionIndex = view.getUint32(i);
15773 i += 4;
15774 }
15775
15776 if (defaultSampleDurationPresent) {
15777 result.defaultSampleDuration = view.getUint32(i);
15778 i += 4;
15779 }
15780
15781 if (defaultSampleSizePresent) {
15782 result.defaultSampleSize = view.getUint32(i);
15783 i += 4;
15784 }
15785
15786 if (defaultSampleFlagsPresent) {
15787 result.defaultSampleFlags = view.getUint32(i);
15788 }
15789
15790 if (durationIsEmpty) {
15791 result.durationIsEmpty = true;
15792 }
15793
15794 if (!baseDataOffsetPresent && defaultBaseIsMoof) {
15795 result.baseDataOffsetIsMoof = true;
15796 }
15797
15798 return result;
15799 };
15800
15801 var parseTfhd = tfhd;
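  // Worked example (illustrative): the tfhd flags determine which optional
  // fields follow the trackId. With only default-sample-duration-present
  // (0x000008) set:
  //
  //   parseTfhd(new Uint8Array([
  //     0x00,                   // version 0
  //     0x00, 0x00, 0x08,       // flags
  //     0x00, 0x00, 0x00, 0x01, // trackId 1
  //     0x00, 0x00, 0x0B, 0xB8  // defaultSampleDuration 3000
  //   ]));
  //   // => { version: 0, trackId: 1, defaultSampleDuration: 3000, ... }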
15802 var discardEmulationPreventionBytes = captionPacketParser.discardEmulationPreventionBytes;
15803 var CaptionStream = captionStream.CaptionStream;
15804 /**
15805 * Maps an offset in the mdat to a sample based on the size of the samples.
15806 * Assumes that `parseSamples` has been called first.
15807 *
15808 * @param {Number} offset - The offset into the mdat
15809 * @param {Object[]} samples - An array of samples, parsed using `parseSamples`
15810 * @return {?Object} The matching sample, or null if no match was found.
15811 *
15812 * @see ISO-BMFF-12/2015, Section 8.8.8
15813 **/
15814
15815 var mapToSample = function mapToSample(offset, samples) {
15816 var approximateOffset = offset;
15817
15818 for (var i = 0; i < samples.length; i++) {
15819 var sample = samples[i];
15820
15821 if (approximateOffset < sample.size) {
15822 return sample;
15823 }
15824
15825 approximateOffset -= sample.size;
15826 }
15827
15828 return null;
15829 };
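  // Worked example (illustrative): for samples of sizes [1000, 2000, 3000],
  // an mdat offset of 2500 skips past the first sample (2500 - 1000 = 1500)
  // and lands within the second (1500 < 2000), so the second sample is
  // returned.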
15830 /**
15831 * Finds SEI nal units contained in a Media Data Box.
15832 * Assumes that `parseSamples` has been called first.
15833 *
15834 * @param {Uint8Array} avcStream - The bytes of the mdat
15835 * @param {Object[]} samples - The samples parsed out by `parseSamples`
15836 * @param {Number} trackId - The trackId of this video track
15837 * @return {Object[]} seiNals - the parsed SEI NALUs found.
15838 * The contents of the seiNal should match what is expected by
15839 * CaptionStream.push (nalUnitType, size, data, escapedRBSP, pts, dts)
15840 *
15841 * @see ISO-BMFF-12/2015, Section 8.1.1
15842 * @see Rec. ITU-T H.264, 7.3.2.3.1
15843 **/
15844
15845
15846 var findSeiNals = function findSeiNals(avcStream, samples, trackId) {
15847 var avcView = new DataView(avcStream.buffer, avcStream.byteOffset, avcStream.byteLength),
15848 result = [],
15849 seiNal,
15850 i,
15851 length,
15852 lastMatchedSample;
15853
15854 for (i = 0; i + 4 < avcStream.length; i += length) {
15855 length = avcView.getUint32(i);
15856 i += 4; // Bail if this doesn't appear to be an H264 stream
15857
15858 if (length <= 0) {
15859 continue;
15860 }
15861
15862 switch (avcStream[i] & 0x1F) {
15863 case 0x06:
15864 var data = avcStream.subarray(i + 1, i + 1 + length);
15865 var matchingSample = mapToSample(i, samples);
15866 seiNal = {
15867 nalUnitType: 'sei_rbsp',
15868 size: length,
15869 data: data,
15870 escapedRBSP: discardEmulationPreventionBytes(data),
15871 trackId: trackId
15872 };
15873
15874 if (matchingSample) {
15875 seiNal.pts = matchingSample.pts;
15876 seiNal.dts = matchingSample.dts;
15877 lastMatchedSample = matchingSample;
15878 } else if (lastMatchedSample) {
15879 // If a matching sample cannot be found, use the last
15880 // sample's values as they should be as close as possible
15881 seiNal.pts = lastMatchedSample.pts;
15882 seiNal.dts = lastMatchedSample.dts;
15883 } else {
15884 // eslint-disable-next-line no-console
15885 console.log("We've encountered a nal unit without data. See mux.js#233.");
15886 break;
15887 }
15888
15889 result.push(seiNal);
15890 break;
15891 }
15892 }
15893
15894 return result;
15895 };
15896 /**
15897 * Parses sample information out of Track Run Boxes and calculates
15898 * the absolute presentation and decode timestamps of each sample.
15899 *
15900 * @param {Array<Uint8Array>} truns - The Track Run (trun) boxes to be parsed
15901 * @param {Number} baseMediaDecodeTime - base media decode time from tfdt
15902 * @see ISO-BMFF-12/2015, Section 8.8.12
15903 * @param {Object} tfhd - The parsed Track Fragment Header
15904 * @see inspect.parseTfhd
15905 * @return {Object[]} the parsed samples
15906 *
15907 * @see ISO-BMFF-12/2015, Section 8.8.8
15908 **/
15909
15910
15911 var parseSamples = function parseSamples(truns, baseMediaDecodeTime, tfhd) {
15912 var currentDts = baseMediaDecodeTime;
15913 var defaultSampleDuration = tfhd.defaultSampleDuration || 0;
15914 var defaultSampleSize = tfhd.defaultSampleSize || 0;
15915 var trackId = tfhd.trackId;
15916 var allSamples = [];
15917 truns.forEach(function (trun) {
15918 // Note: We currently do not parse the sample table as well
15919 // as the trun. It's possible some sources will require this.
15920 // moov > trak > mdia > minf > stbl
15921 var trackRun = parseTrun(trun);
15922 var samples = trackRun.samples;
15923 samples.forEach(function (sample) {
15924 if (sample.duration === undefined) {
15925 sample.duration = defaultSampleDuration;
15926 }
15927
15928 if (sample.size === undefined) {
15929 sample.size = defaultSampleSize;
15930 }
15931
15932 sample.trackId = trackId;
15933 sample.dts = currentDts;
15934
15935 if (sample.compositionTimeOffset === undefined) {
15936 sample.compositionTimeOffset = 0;
15937 }
15938
15939 sample.pts = currentDts + sample.compositionTimeOffset;
15940 currentDts += sample.duration;
15941 });
15942 allSamples = allSamples.concat(samples);
15943 });
15944 return allSamples;
15945 };
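  // Worked example (illustrative): with baseMediaDecodeTime 90000, a default
  // sample duration of 3000, and a second sample carrying
  // compositionTimeOffset 1000, the loop above produces:
  //
  //   sample 0: dts = 90000, pts = 90000
  //   sample 1: dts = 93000, pts = 94000 (93000 + 1000)
  //
  // dts accumulates durations while pts adds each sample's own offset.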
15946 /**
15947 * Parses out caption nals from an FMP4 segment's video tracks.
15948 *
15949 * @param {Uint8Array} segment - The bytes of a single segment
15950 * @param {Number} videoTrackId - The trackId of a video track in the segment
15951 * @return {Object.<Number, Object[]>} A mapping of video trackId to
15952 * a list of seiNals found in that track
15953 **/
15954
15955
15956 var parseCaptionNals = function parseCaptionNals(segment, videoTrackId) {
15957 // To get the samples
15958 var trafs = findBox_1(segment, ['moof', 'traf']); // To get SEI NAL units
15959
15960 var mdats = findBox_1(segment, ['mdat']);
15961 var captionNals = {};
15962 var mdatTrafPairs = []; // Pair up each traf with a mdat as moofs and mdats are in pairs
15963
15964 mdats.forEach(function (mdat, index) {
15965 var matchingTraf = trafs[index];
15966 mdatTrafPairs.push({
15967 mdat: mdat,
15968 traf: matchingTraf
15969 });
15970 });
15971 mdatTrafPairs.forEach(function (pair) {
15972 var mdat = pair.mdat;
15973 var traf = pair.traf;
15974 var tfhd = findBox_1(traf, ['tfhd']); // Exactly 1 tfhd per traf
15975
15976 var headerInfo = parseTfhd(tfhd[0]);
15977 var trackId = headerInfo.trackId;
15978 var tfdt = findBox_1(traf, ['tfdt']); // Either 0 or 1 tfdt per traf
15979
15980 var baseMediaDecodeTime = tfdt.length > 0 ? parseTfdt(tfdt[0]).baseMediaDecodeTime : 0;
15981 var truns = findBox_1(traf, ['trun']);
15982 var samples;
15983 var seiNals; // Only parse video data for the chosen video track
15984
15985 if (videoTrackId === trackId && truns.length > 0) {
15986 samples = parseSamples(truns, baseMediaDecodeTime, headerInfo);
15987 seiNals = findSeiNals(mdat, samples, trackId);
15988
15989 if (!captionNals[trackId]) {
15990 captionNals[trackId] = [];
15991 }
15992
15993 captionNals[trackId] = captionNals[trackId].concat(seiNals);
15994 }
15995 });
15996 return captionNals;
15997 };
15998 /**
15999 * Parses out inband captions from an MP4 container and returns
16000 * caption objects that can be used by WebVTT and the TextTrack API.
16001 * @see https://developer.mozilla.org/en-US/docs/Web/API/VTTCue
16002 * @see https://developer.mozilla.org/en-US/docs/Web/API/TextTrack
16003 * Assumes that `probe.getVideoTrackIds` and `probe.timescale` have been called first
16004 *
16005 * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
16006 * @param {Number} trackId - The id of the video track to parse
16007 * @param {Number} timescale - The timescale for the video track from the init segment
16008 *
16009 * @return {?Object[]} parsedCaptions - A list of captions or null if no video tracks
16010 * @return {Number} parsedCaptions[].startTime - The time to show the caption in seconds
16011 * @return {Number} parsedCaptions[].endTime - The time to stop showing the caption in seconds
16012 * @return {String} parsedCaptions[].text - The visible content of the caption
16013 **/
16014
16015
16016 var parseEmbeddedCaptions = function parseEmbeddedCaptions(segment, trackId, timescale) {
16017 var seiNals; // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
16018
16019 if (trackId === null) {
16020 return null;
16021 }
16022
16023 seiNals = parseCaptionNals(segment, trackId);
16024 return {
16025 seiNals: seiNals[trackId],
16026 timescale: timescale
16027 };
16028 };
16029 /**
16030 * Converts SEI NALUs into captions that can be used by video.js
16031 **/
16032
16033
16034 var CaptionParser = function CaptionParser() {
16035 var isInitialized = false;
16036 var captionStream; // Stores segments seen before trackId and timescale are set
16037
16038 var segmentCache; // Stores video track ID of the track being parsed
16039
16040 var trackId; // Stores the timescale of the track being parsed
16041
16042 var timescale; // Stores captions parsed so far
16043
16044 var parsedCaptions; // Stores whether we are receiving partial data or not
16045
16046 var parsingPartial;
16047 /**
16048 * A method to indicate whether a CaptionParser has been initialized
16049 * @returns {Boolean}
16050 **/
16051
16052 this.isInitialized = function () {
16053 return isInitialized;
16054 };
16055 /**
16056 * Initializes the underlying CaptionStream, SEI NAL parsing
16057 * and management, and caption collection
16058 **/
16059
16060
16061 this.init = function (options) {
16062 captionStream = new CaptionStream();
16063 isInitialized = true;
16064 parsingPartial = options ? options.isPartial : false; // Collect dispatched captions
16065
16066 captionStream.on('data', function (event) {
16067 // Convert to seconds in the source's timescale
16068 event.startTime = event.startPts / timescale;
16069 event.endTime = event.endPts / timescale;
16070 parsedCaptions.captions.push(event);
16071 parsedCaptions.captionStreams[event.stream] = true;
16072 });
16073 };
16074 /**
16075 * Determines if a new video track will be selected
16076 * or if the timescale changed
16077 * @return {Boolean}
16078 **/
16079
16080
16081 this.isNewInit = function (videoTrackIds, timescales) {
16082 if (videoTrackIds && videoTrackIds.length === 0 || timescales && typeof timescales === 'object' && Object.keys(timescales).length === 0) {
16083 return false;
16084 }
16085
16086 return trackId !== videoTrackIds[0] || timescale !== timescales[trackId];
16087 };
16088 /**
16089 * Parses out SEI captions and interacts with underlying
16090 * CaptionStream to return dispatched captions
16091 *
16092 * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
16093 * @param {Number[]} videoTrackIds - A list of video tracks found in the init segment
16094 * @param {Object.<Number, Number>} timescales - The timescales found in the init segment
16095 * @see parseEmbeddedCaptions
16096 * @see m2ts/caption-stream.js
16097 **/
16098
16099
16100 this.parse = function (segment, videoTrackIds, timescales) {
16101 var parsedData;
16102
16103 if (!this.isInitialized()) {
16104 return null; // This is not likely to be a video segment
16105 } else if (!videoTrackIds || !timescales) {
16106 return null;
16107 } else if (this.isNewInit(videoTrackIds, timescales)) {
16108 // Use the first video track only as there is no
16109 // mechanism to switch to other video tracks
16110 trackId = videoTrackIds[0];
16111 timescale = timescales[trackId]; // If an init segment has not been seen yet, hold onto segment
16112 // data until we have one.
16113 // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
16114 } else if (trackId === null || !timescale) {
16115 segmentCache.push(segment);
16116 return null;
16117 } // Now that a timescale and trackId is set, parse cached segments
16118
16119
16120 while (segmentCache.length > 0) {
16121 var cachedSegment = segmentCache.shift();
16122 this.parse(cachedSegment, videoTrackIds, timescales);
16123 }
16124
16125 parsedData = parseEmbeddedCaptions(segment, trackId, timescale);
16126
16127 if (parsedData === null || !parsedData.seiNals) {
16128 return null;
16129 }
16130
16131 this.pushNals(parsedData.seiNals); // Force the parsed captions to be dispatched
16132
16133 this.flushStream();
16134 return parsedCaptions;
16135 };
16136 /**
16137 * Pushes SEI NALUs onto CaptionStream
16138 * @param {Object[]} nals - A list of SEI nals parsed using `parseCaptionNals`
16139 * Assumes that `parseCaptionNals` has been called first
16140 * @see m2ts/caption-stream.js
16141 **/
16142
16143
16144 this.pushNals = function (nals) {
16145 if (!this.isInitialized() || !nals || nals.length === 0) {
16146 return null;
16147 }
16148
16149 nals.forEach(function (nal) {
16150 captionStream.push(nal);
16151 });
16152 };
16153 /**
16154 * Flushes underlying CaptionStream to dispatch processed, displayable captions
16155 * @see m2ts/caption-stream.js
16156 **/
16157
16158
16159 this.flushStream = function () {
16160 if (!this.isInitialized()) {
16161 return null;
16162 }
16163
16164 if (!parsingPartial) {
16165 captionStream.flush();
16166 } else {
16167 captionStream.partialFlush();
16168 }
16169 };
16170 /**
16171 * Reset caption buckets for new data
16172 **/
16173
16174
16175 this.clearParsedCaptions = function () {
16176 parsedCaptions.captions = [];
16177 parsedCaptions.captionStreams = {};
16178 };
16179 /**
16180 * Resets underlying CaptionStream
16181 * @see m2ts/caption-stream.js
16182 **/
16183
16184
16185 this.resetCaptionStream = function () {
16186 if (!this.isInitialized()) {
16187 return null;
16188 }
16189
16190 captionStream.reset();
16191 };
16192 /**
16193 * Convenience method to clear all captions flushed from the
16194 * CaptionStream and still being parsed
16195 * @see m2ts/caption-stream.js
16196 **/
16197
16198
16199 this.clearAllCaptions = function () {
16200 this.clearParsedCaptions();
16201 this.resetCaptionStream();
16202 };
16203 /**
16204 * Reset caption parser
16205 **/
16206
16207
16208 this.reset = function () {
16209 segmentCache = [];
16210 trackId = null;
16211 timescale = null;
16212
16213 if (!parsedCaptions) {
16214 parsedCaptions = {
16215 captions: [],
16216 // CC1, CC2, CC3, CC4
16217 captionStreams: {}
16218 };
16219 } else {
16220 this.clearParsedCaptions();
16221 }
16222
16223 this.resetCaptionStream();
16224 };
16225
16226 this.reset();
16227 };
16228
16229 var captionParser = CaptionParser;
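  // Illustrative usage sketch (the track id and timescale here are
  // hypothetical; in practice they are probed from the init segment):
  //
  //   var parser = new captionParser();
  //   parser.init();
  //   // segmentBytes: a Uint8Array holding an fMP4 media segment
  //   var parsed = parser.parse(segmentBytes, [1], { 1: 90000 });
  //   if (parsed) {
  //     parsed.captions.forEach(function (caption) {
  //       // caption.startTime and caption.endTime are in seconds;
  //       // caption.text holds the visible content
  //     });
  //   }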
16230 /* global self */
16231
16232 var typeFromStreamString = function typeFromStreamString(streamString) {
16233 if (streamString === 'AudioSegmentStream') {
16234 return 'audio';
16235 }
16236
16237 return streamString === 'VideoSegmentStream' ? 'video' : '';
16238 };
16239 /**
16240 * Re-emits transmuxer events by converting them into messages to the
16241 * world outside the worker.
16242 *
16243 * @param {Object} transmuxer the transmuxer to wire events on
16244 * @private
16245 */
16246
16247
16248 var wireFullTransmuxerEvents = function wireFullTransmuxerEvents(self, transmuxer) {
16249 transmuxer.on('data', function (segment) {
16250 // transfer ownership of the underlying ArrayBuffer
16251 // instead of doing a copy to save memory
16252 // ArrayBuffers are transferable but generic TypedArrays are not
16253 // @link https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Using_web_workers#Passing_data_by_transferring_ownership_(transferable_objects)
16254 var initArray = segment.initSegment;
16255 segment.initSegment = {
16256 data: initArray.buffer,
16257 byteOffset: initArray.byteOffset,
16258 byteLength: initArray.byteLength
16259 };
16260 var typedArray = segment.data;
16261 segment.data = typedArray.buffer;
16262 self.postMessage({
16263 action: 'data',
16264 segment: segment,
16265 byteOffset: typedArray.byteOffset,
16266 byteLength: typedArray.byteLength
16267 }, [segment.data]);
16268 });
16269 transmuxer.on('done', function (data) {
16270 self.postMessage({
16271 action: 'done'
16272 });
16273 });
16274 transmuxer.on('gopInfo', function (gopInfo) {
16275 self.postMessage({
16276 action: 'gopInfo',
16277 gopInfo: gopInfo
16278 });
16279 });
16280 transmuxer.on('videoSegmentTimingInfo', function (timingInfo) {
16281 var videoSegmentTimingInfo = {
16282 start: {
16283 decode: clock.videoTsToSeconds(timingInfo.start.dts),
16284 presentation: clock.videoTsToSeconds(timingInfo.start.pts)
16285 },
16286 end: {
16287 decode: clock.videoTsToSeconds(timingInfo.end.dts),
16288 presentation: clock.videoTsToSeconds(timingInfo.end.pts)
16289 },
16290 baseMediaDecodeTime: clock.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
16291 };
16292
16293 if (timingInfo.prependedContentDuration) {
16294 videoSegmentTimingInfo.prependedContentDuration = clock.videoTsToSeconds(timingInfo.prependedContentDuration);
16295 }
16296
16297 self.postMessage({
16298 action: 'videoSegmentTimingInfo',
16299 videoSegmentTimingInfo: videoSegmentTimingInfo
16300 });
16301 });
16302 transmuxer.on('audioSegmentTimingInfo', function (timingInfo) {
16303 // Note that all times for [audio/video]SegmentTimingInfo events are in video clock
16304 var audioSegmentTimingInfo = {
16305 start: {
16306 decode: clock.videoTsToSeconds(timingInfo.start.dts),
16307 presentation: clock.videoTsToSeconds(timingInfo.start.pts)
16308 },
16309 end: {
16310 decode: clock.videoTsToSeconds(timingInfo.end.dts),
16311 presentation: clock.videoTsToSeconds(timingInfo.end.pts)
16312 },
16313 baseMediaDecodeTime: clock.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
16314 };
16315
16316 if (timingInfo.prependedContentDuration) {
16317 audioSegmentTimingInfo.prependedContentDuration = clock.videoTsToSeconds(timingInfo.prependedContentDuration);
16318 }
16319
16320 self.postMessage({
16321 action: 'audioSegmentTimingInfo',
16322 audioSegmentTimingInfo: audioSegmentTimingInfo
16323 });
16324 });
16325 transmuxer.on('id3Frame', function (id3Frame) {
16326 self.postMessage({
16327 action: 'id3Frame',
16328 id3Frame: id3Frame
16329 });
16330 });
16331 transmuxer.on('caption', function (caption) {
16332 self.postMessage({
16333 action: 'caption',
16334 caption: caption
16335 });
16336 });
16337 transmuxer.on('trackinfo', function (trackInfo) {
16338 self.postMessage({
16339 action: 'trackinfo',
16340 trackInfo: trackInfo
16341 });
16342 });
16343 transmuxer.on('audioTimingInfo', function (audioTimingInfo) {
16344 // convert to video TS since we prioritize video time over audio
16345 self.postMessage({
16346 action: 'audioTimingInfo',
16347 audioTimingInfo: {
16348 start: clock.videoTsToSeconds(audioTimingInfo.start),
16349 end: clock.videoTsToSeconds(audioTimingInfo.end)
16350 }
16351 });
16352 });
16353 transmuxer.on('videoTimingInfo', function (videoTimingInfo) {
16354 self.postMessage({
16355 action: 'videoTimingInfo',
16356 videoTimingInfo: {
16357 start: clock.videoTsToSeconds(videoTimingInfo.start),
16358 end: clock.videoTsToSeconds(videoTimingInfo.end)
16359 }
16360 });
16361 });
16362 };
16363
16364 var wirePartialTransmuxerEvents = function wirePartialTransmuxerEvents(self, transmuxer) {
16365 transmuxer.on('data', function (event) {
16366 // transfer ownership of the underlying ArrayBuffer
16367 // instead of doing a copy to save memory
16368 // ArrayBuffers are transferable but generic TypedArrays are not
16369 // @link https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Using_web_workers#Passing_data_by_transferring_ownership_(transferable_objects)
16370 var initSegment = {
16371 data: event.data.track.initSegment.buffer,
16372 byteOffset: event.data.track.initSegment.byteOffset,
16373 byteLength: event.data.track.initSegment.byteLength
16374 };
16375 var boxes = {
16376 data: event.data.boxes.buffer,
16377 byteOffset: event.data.boxes.byteOffset,
16378 byteLength: event.data.boxes.byteLength
16379 };
16380 var segment = {
16381 boxes: boxes,
16382 initSegment: initSegment,
16383 type: event.type,
16384 sequence: event.data.sequence
16385 };
16386
16387 if (typeof event.data.videoFrameDts !== 'undefined') {
16388 segment.videoFrameDtsTime = clock.videoTsToSeconds(event.data.videoFrameDts);
16389 }
16390
16391 if (typeof event.data.videoFramePts !== 'undefined') {
16392 segment.videoFramePtsTime = clock.videoTsToSeconds(event.data.videoFramePts);
16393 }
16394
16395 self.postMessage({
16396 action: 'data',
16397 segment: segment
16398 }, [segment.boxes.data, segment.initSegment.data]);
16399 });
16400 transmuxer.on('id3Frame', function (id3Frame) {
16401 self.postMessage({
16402 action: 'id3Frame',
16403 id3Frame: id3Frame
16404 });
16405 });
16406 transmuxer.on('caption', function (caption) {
16407 self.postMessage({
16408 action: 'caption',
16409 caption: caption
16410 });
16411 });
16412 transmuxer.on('done', function (data) {
16413 self.postMessage({
16414 action: 'done',
16415 type: typeFromStreamString(data)
16416 });
16417 });
16418 transmuxer.on('partialdone', function (data) {
16419 self.postMessage({
16420 action: 'partialdone',
16421 type: typeFromStreamString(data)
16422 });
16423 });
16424 transmuxer.on('endedsegment', function (data) {
16425 self.postMessage({
16426 action: 'endedSegment',
16427 type: typeFromStreamString(data)
16428 });
16429 });
16430 transmuxer.on('trackinfo', function (trackInfo) {
16431 self.postMessage({
16432 action: 'trackinfo',
16433 trackInfo: trackInfo
16434 });
16435 });
16436 transmuxer.on('audioTimingInfo', function (audioTimingInfo) {
16437 // This can happen if flush is called when no
16438 // audio has been processed. This should be an
16439 // unusual case; when it does occur, pass the null
16440 // start time through rather than converting it
16441 if (audioTimingInfo.start === null) {
16442 self.postMessage({
16443 action: 'audioTimingInfo',
16444 audioTimingInfo: audioTimingInfo
16445 });
16446 return;
16447 } // convert to video TS since we prioritize video time over audio
16448
16449
16450 var timingInfoInSeconds = {
16451 start: clock.videoTsToSeconds(audioTimingInfo.start)
16452 };
16453
16454 if (audioTimingInfo.end) {
16455 timingInfoInSeconds.end = clock.videoTsToSeconds(audioTimingInfo.end);
16456 }
16457
16458 self.postMessage({
16459 action: 'audioTimingInfo',
16460 audioTimingInfo: timingInfoInSeconds
16461 });
16462 });
16463 transmuxer.on('videoTimingInfo', function (videoTimingInfo) {
16464 var timingInfoInSeconds = {
16465 start: clock.videoTsToSeconds(videoTimingInfo.start)
16466 };
16467
16468 if (videoTimingInfo.end) {
16469 timingInfoInSeconds.end = clock.videoTsToSeconds(videoTimingInfo.end);
16470 }
16471
16472 self.postMessage({
16473 action: 'videoTimingInfo',
16474 videoTimingInfo: timingInfoInSeconds
16475 });
16476 });
16477 };
16478 /**
16479 * All incoming messages route through this hash. If no function exists
16480 * to handle an incoming message, then we ignore the message.
16481 *
16482 * @class MessageHandlers
16483 * @param {Object} options the options to initialize with
16484 */
16485
16486
16487 var MessageHandlers = /*#__PURE__*/function () {
16488 function MessageHandlers(self, options) {
16489 this.options = options || {};
16490 this.self = self;
16491 this.init();
16492 }
16493 /**
16494 * initialize our web worker and wire all the events.
16495 */
16496
16497
16498 var _proto = MessageHandlers.prototype;
16499
16500 _proto.init = function init() {
16501 if (this.transmuxer) {
16502 this.transmuxer.dispose();
16503 }
16504
16505 this.transmuxer = this.options.handlePartialData ? new transmuxer(this.options) : new transmuxer$1.Transmuxer(this.options);
16506
16507 if (this.options.handlePartialData) {
16508 wirePartialTransmuxerEvents(this.self, this.transmuxer);
16509 } else {
16510 wireFullTransmuxerEvents(this.self, this.transmuxer);
16511 }
16512 };
16513
16514 _proto.pushMp4Captions = function pushMp4Captions(data) {
16515 if (!this.captionParser) {
16516 this.captionParser = new captionParser();
16517 this.captionParser.init();
16518 }
16519
16520 var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
16521 var parsed = this.captionParser.parse(segment, data.trackIds, data.timescales);
16522 this.self.postMessage({
16523 action: 'mp4Captions',
16524 captions: parsed && parsed.captions || [],
16525 data: segment.buffer
16526 }, [segment.buffer]);
16527 };
16528
16529 _proto.clearAllMp4Captions = function clearAllMp4Captions() {
16530 if (this.captionParser) {
16531 this.captionParser.clearAllCaptions();
16532 }
16533 };
16534
16535 _proto.clearParsedMp4Captions = function clearParsedMp4Captions() {
16536 if (this.captionParser) {
16537 this.captionParser.clearParsedCaptions();
16538 }
16539 }
16540 /**
16541 * Adds data (a ts segment) to the start of the transmuxer pipeline for
16542 * processing.
16543 *
16544 * @param {ArrayBuffer} data data to push into the muxer
16545 */
16546 ;
16547
16548 _proto.push = function push(data) {
16549 // Cast array buffer to correct type for transmuxer
16550 var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
16551 this.transmuxer.push(segment);
16552 }
16553 /**
16554 * Recreate the transmuxer so that the next segment added via `push`
16555 * starts with a fresh transmuxer.
16556 */
16557 ;
16558
16559 _proto.reset = function reset() {
16560 this.transmuxer.reset();
16561 }
16562 /**
16563 * Set the value that will be used as the `baseMediaDecodeTime` time for the
16564 * next segment pushed in. Subsequent segments will have their `baseMediaDecodeTime`
16565 * set relative to the first based on the PTS values.
16566 *
16567 * @param {Object} data used to set the timestamp offset in the muxer
16568 */
16569 ;
16570
16571 _proto.setTimestampOffset = function setTimestampOffset(data) {
16572 var timestampOffset = data.timestampOffset || 0;
16573 this.transmuxer.setBaseMediaDecodeTime(Math.round(clock.secondsToVideoTs(timestampOffset)));
16574 };
16575
16576 _proto.setAudioAppendStart = function setAudioAppendStart(data) {
16577 this.transmuxer.setAudioAppendStart(Math.ceil(clock.secondsToVideoTs(data.appendStart)));
16578 };
16579
16580 _proto.setRemux = function setRemux(data) {
16581 this.transmuxer.setRemux(data.remux);
16582 }
16583 /**
16584 * Forces the pipeline to finish processing the last segment and emit its
16585 * results.
16586 *
16587 * @param {Object} data event data, not really used
16588 */
16589 ;
16590
16591 _proto.flush = function flush(data) {
16592 this.transmuxer.flush(); // transmuxed done action is fired after both audio/video pipelines are flushed
16593
16594 self.postMessage({
16595 action: 'done',
16596 type: 'transmuxed'
16597 });
16598 };
16599
16600 _proto.partialFlush = function partialFlush(data) {
16601 this.transmuxer.partialFlush(); // transmuxed partialdone action is fired after both audio/video pipelines are flushed
16602
16603 self.postMessage({
16604 action: 'partialdone',
16605 type: 'transmuxed'
16606 });
16607 };
16608
16609 _proto.endTimeline = function endTimeline() {
16610 this.transmuxer.endTimeline(); // transmuxed endedtimeline action is fired after both audio/video pipelines end their
16611 // timelines
16612
16613 self.postMessage({
16614 action: 'endedtimeline',
16615 type: 'transmuxed'
16616 });
16617 };
16618
16619 _proto.alignGopsWith = function alignGopsWith(data) {
16620 this.transmuxer.alignGopsWith(data.gopsToAlignWith.slice());
16621 };
16622
16623 return MessageHandlers;
16624 }();
16625 /**
16626 * Our web worker interface so that things can talk to mux.js
16627 * that will be running in a web worker. the scope is passed to this by
16628 * webworkify.
16629 *
16630 * @param {Object} self the scope for the web worker
16631 */
16632
16633
16634 self.onmessage = function (event) {
16635 if (event.data.action === 'init' && event.data.options) {
16636 this.messageHandlers = new MessageHandlers(self, event.data.options);
16637 return;
16638 }
16639
16640 if (!this.messageHandlers) {
16641 this.messageHandlers = new MessageHandlers(self);
16642 }
16643
16644 if (event.data && event.data.action && event.data.action !== 'init') {
16645 if (this.messageHandlers[event.data.action]) {
16646 this.messageHandlers[event.data.action](event.data);
16647 }
16648 }
16649 };
16650 }));
16651 var TransmuxWorker = factory(workerCode$1);
16652 /* rollup-plugin-worker-factory end for worker!/Users/gkatsevman/p/http-streaming-release/src/transmuxer-worker.js */
16653
16654 var handleData_ = function handleData_(event, transmuxedData, callback) {
16655 var _event$data$segment = event.data.segment,
16656 type = _event$data$segment.type,
16657 initSegment = _event$data$segment.initSegment,
16658 captions = _event$data$segment.captions,
16659 captionStreams = _event$data$segment.captionStreams,
16660 metadata = _event$data$segment.metadata,
16661 videoFrameDtsTime = _event$data$segment.videoFrameDtsTime,
16662 videoFramePtsTime = _event$data$segment.videoFramePtsTime;
16663 transmuxedData.buffer.push({
16664 captions: captions,
16665 captionStreams: captionStreams,
16666 metadata: metadata
16667 }); // currently, boxes come back from the partial transmuxer and data from the full transmuxer
16668
16669 var boxes = event.data.segment.boxes || {
16670 data: event.data.segment.data
16671 };
16672 var result = {
16673 type: type,
16674 // cast ArrayBuffer to TypedArray
16675 data: new Uint8Array(boxes.data, boxes.data.byteOffset, boxes.data.byteLength),
16676 initSegment: new Uint8Array(initSegment.data, initSegment.byteOffset, initSegment.byteLength)
16677 };
16678
16679 if (typeof videoFrameDtsTime !== 'undefined') {
16680 result.videoFrameDtsTime = videoFrameDtsTime;
16681 }
16682
16683 if (typeof videoFramePtsTime !== 'undefined') {
16684 result.videoFramePtsTime = videoFramePtsTime;
16685 }
16686
16687 callback(result);
16688 };
16689 var handleDone_ = function handleDone_(_ref) {
16690 var transmuxedData = _ref.transmuxedData,
16691 callback = _ref.callback;
16692 // Previously we only returned data on data events,
16693 // not on done events. Clear out the buffer to keep that consistent.
16694 transmuxedData.buffer = []; // all buffers should have been flushed from the muxer, so start processing anything we
16695 // have received
16696
16697 callback(transmuxedData);
16698 };
16699 var handleGopInfo_ = function handleGopInfo_(event, transmuxedData) {
16700 transmuxedData.gopInfo = event.data.gopInfo;
16701 };
16702 var processTransmux = function processTransmux(options) {
16703 var transmuxer = options.transmuxer,
16704 bytes = options.bytes,
16705 audioAppendStart = options.audioAppendStart,
16706 gopsToAlignWith = options.gopsToAlignWith,
16707 isPartial = options.isPartial,
16708 remux = options.remux,
16709 onData = options.onData,
16710 onTrackInfo = options.onTrackInfo,
16711 onAudioTimingInfo = options.onAudioTimingInfo,
16712 onVideoTimingInfo = options.onVideoTimingInfo,
16713 onVideoSegmentTimingInfo = options.onVideoSegmentTimingInfo,
16714 onAudioSegmentTimingInfo = options.onAudioSegmentTimingInfo,
16715 onId3 = options.onId3,
16716 onCaptions = options.onCaptions,
16717 onDone = options.onDone,
16718 onEndedTimeline = options.onEndedTimeline,
16719 isEndOfTimeline = options.isEndOfTimeline;
16720 var transmuxedData = {
16721 isPartial: isPartial,
16722 buffer: []
16723 };
16724 var waitForEndedTimelineEvent = isEndOfTimeline;
16725
16726 var handleMessage = function handleMessage(event) {
16727 if (transmuxer.currentTransmux !== options) {
16728 // disposed
16729 return;
16730 }
16731
16732 if (event.data.action === 'data') {
16733 handleData_(event, transmuxedData, onData);
16734 }
16735
16736 if (event.data.action === 'trackinfo') {
16737 onTrackInfo(event.data.trackInfo);
16738 }
16739
16740 if (event.data.action === 'gopInfo') {
16741 handleGopInfo_(event, transmuxedData);
16742 }
16743
16744 if (event.data.action === 'audioTimingInfo') {
16745 onAudioTimingInfo(event.data.audioTimingInfo);
16746 }
16747
16748 if (event.data.action === 'videoTimingInfo') {
16749 onVideoTimingInfo(event.data.videoTimingInfo);
16750 }
16751
16752 if (event.data.action === 'videoSegmentTimingInfo') {
16753 onVideoSegmentTimingInfo(event.data.videoSegmentTimingInfo);
16754 }
16755
16756 if (event.data.action === 'audioSegmentTimingInfo') {
16757 onAudioSegmentTimingInfo(event.data.audioSegmentTimingInfo);
16758 }
16759
16760 if (event.data.action === 'id3Frame') {
16761 onId3([event.data.id3Frame], event.data.id3Frame.dispatchType);
16762 }
16763
16764 if (event.data.action === 'caption') {
16765 onCaptions(event.data.caption);
16766 }
16767
16768 if (event.data.action === 'endedtimeline') {
16769 waitForEndedTimelineEvent = false;
16770 onEndedTimeline();
16771 } // wait for the transmuxed event since we may have audio and video
16772
16773
16774 if (event.data.type !== 'transmuxed') {
16775 return;
16776 } // If the "endedtimeline" event has not yet fired, and this segment represents the end
16777 // of a timeline, that means there may still be data events before the segment
16778 // processing can be considered complete. In that case, the final event should be
16779 // an "endedtimeline" event with the type "transmuxed."
16780
16781
16782 if (waitForEndedTimelineEvent) {
16783 return;
16784 }
16785
16786 transmuxer.onmessage = null;
16787 handleDone_({
16788 transmuxedData: transmuxedData,
16789 callback: onDone
16790 });
16791 /* eslint-disable no-use-before-define */
16792
16793 dequeue(transmuxer);
16794 /* eslint-enable */
16795 };
16796
16797 transmuxer.onmessage = handleMessage;
16798
16799 if (audioAppendStart) {
16800 transmuxer.postMessage({
16801 action: 'setAudioAppendStart',
16802 appendStart: audioAppendStart
16803 });
16804 } // allow empty arrays to be passed to clear out GOPs
16805
16806
16807 if (Array.isArray(gopsToAlignWith)) {
16808 transmuxer.postMessage({
16809 action: 'alignGopsWith',
16810 gopsToAlignWith: gopsToAlignWith
16811 });
16812 }
16813
16814 if (typeof remux !== 'undefined') {
16815 transmuxer.postMessage({
16816 action: 'setRemux',
16817 remux: remux
16818 });
16819 }
16820
16821 if (bytes.byteLength) {
16822 var buffer = bytes instanceof ArrayBuffer ? bytes : bytes.buffer;
16823 var byteOffset = bytes instanceof ArrayBuffer ? 0 : bytes.byteOffset;
16824 transmuxer.postMessage({
16825 action: 'push',
16826 // Send the typed-array of data as an ArrayBuffer so that
16827 // it can be sent as a "Transferable" and avoid the costly
16828 // memory copy
16829 data: buffer,
16830 // To recreate the original typed-array, we need information
16831 // about what portion of the ArrayBuffer it was a view into
16832 byteOffset: byteOffset,
16833 byteLength: bytes.byteLength
16834 }, [buffer]);
16835 } // even if we didn't push any bytes, we have to make sure we flush in case we reached
16836 // the end of the segment
16837
16838
16839 transmuxer.postMessage({
16840 action: isPartial ? 'partialFlush' : 'flush'
16841 });
16842
16843 if (isEndOfTimeline) {
16844 transmuxer.postMessage({
16845 action: 'endTimeline'
16846 });
16847 }
16848 };
16849 var dequeue = function dequeue(transmuxer) {
16850 transmuxer.currentTransmux = null;
16851
16852 if (transmuxer.transmuxQueue.length) {
16853 transmuxer.currentTransmux = transmuxer.transmuxQueue.shift();
16854
16855 if (typeof transmuxer.currentTransmux === 'function') {
16856 transmuxer.currentTransmux();
16857 } else {
16858 processTransmux(transmuxer.currentTransmux);
16859 }
16860 }
16861 };
16862 var processAction = function processAction(transmuxer, action) {
16863 transmuxer.postMessage({
16864 action: action
16865 });
16866 dequeue(transmuxer);
16867 };
16868 var enqueueAction = function enqueueAction(action, transmuxer) {
16869 if (!transmuxer.currentTransmux) {
16870 transmuxer.currentTransmux = action;
16871 processAction(transmuxer, action);
16872 return;
16873 }
16874
16875 transmuxer.transmuxQueue.push(processAction.bind(null, transmuxer, action));
16876 };
16877 var reset = function reset(transmuxer) {
16878 enqueueAction('reset', transmuxer);
16879 };
16880 var endTimeline = function endTimeline(transmuxer) {
16881 enqueueAction('endTimeline', transmuxer);
16882 };
16883 var transmux = function transmux(options) {
16884 if (!options.transmuxer.currentTransmux) {
16885 options.transmuxer.currentTransmux = options;
16886 processTransmux(options);
16887 return;
16888 }
16889
16890 options.transmuxer.transmuxQueue.push(options);
16891 };
16892 var createTransmuxer = function createTransmuxer(options) {
16893 var transmuxer = new TransmuxWorker();
16894 transmuxer.currentTransmux = null;
16895 transmuxer.transmuxQueue = [];
16896 var term = transmuxer.terminate;
16897
16898 transmuxer.terminate = function () {
16899 transmuxer.currentTransmux = null;
16900 transmuxer.transmuxQueue.length = 0;
16901 return term.call(transmuxer);
16902 };
16903
16904 transmuxer.postMessage({
16905 action: 'init',
16906 options: options
16907 });
16908 return transmuxer;
16909 };
16910 var segmentTransmuxer = {
16911 reset: reset,
16912 endTimeline: endTimeline,
16913 transmux: transmux,
16914 createTransmuxer: createTransmuxer
16915 };
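  // Illustrative usage sketch of the queueing API above. The option values and
  // `segmentBytes` are hypothetical placeholders; the message handler calls
  // whichever callback matches each worker event, so all of them are supplied:
  //
  //   var worker = segmentTransmuxer.createTransmuxer({ remux: true });
  //   segmentTransmuxer.transmux({
  //     transmuxer: worker,
  //     bytes: segmentBytes, // Uint8Array or ArrayBuffer of TS data
  //     isPartial: false,
  //     isEndOfTimeline: false,
  //     onData: function (result) { /* fMP4 bytes in result.data */ },
  //     onTrackInfo: function () {},
  //     onAudioTimingInfo: function () {},
  //     onVideoTimingInfo: function () {},
  //     onVideoSegmentTimingInfo: function () {},
  //     onAudioSegmentTimingInfo: function () {},
  //     onId3: function () {},
  //     onCaptions: function () {},
  //     onEndedTimeline: function () {},
  //     onDone: function (transmuxedData) { /* segment fully processed */ }
  //   });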
16916
16917 /**
16918 * mux.js
16919 *
16920 * Copyright (c) Brightcove
16921 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
16922 */
16923
16924 var streamTypes = {
16925 H264_STREAM_TYPE: 0x1B,
16926 ADTS_STREAM_TYPE: 0x0F,
16927 METADATA_STREAM_TYPE: 0x15
16928 };
16929
16930 /**
16931 * mux.js
16932 *
16933 * Copyright (c) Brightcove
16934 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
16935 *
16936 * A lightweight readable stream implementation that handles event dispatching.
16937 * Objects that inherit from streams should call init in their constructors.
16938 */
16939
16940 var Stream = function Stream() {
16941 this.init = function () {
16942 var listeners = {};
16943 /**
16944 * Add a listener for a specified event type.
16945 * @param type {string} the event name
16946 * @param listener {function} the callback to be invoked when an event of
16947 * the specified type occurs
16948 */
16949
16950 this.on = function (type, listener) {
16951 if (!listeners[type]) {
16952 listeners[type] = [];
16953 }
16954
16955 listeners[type] = listeners[type].concat(listener);
16956 };
16957 /**
16958 * Remove a listener for a specified event type.
16959 * @param type {string} the event name
16960 * @param listener {function} a function previously registered for this
16961 * type of event through `on`
16962 */
16963
16964
16965 this.off = function (type, listener) {
16966 var index;
16967
16968 if (!listeners[type]) {
16969 return false;
16970 }
16971
16972 index = listeners[type].indexOf(listener);
16973 listeners[type] = listeners[type].slice();
16974 listeners[type].splice(index, 1);
16975 return index > -1;
16976 };
16977 /**
16978 * Trigger an event of the specified type on this stream. Any additional
16979 * arguments to this function are passed as parameters to event listeners.
16980 * @param type {string} the event name
16981 */
16982
16983
16984 this.trigger = function (type) {
16985 var callbacks, i, length, args;
16986 callbacks = listeners[type];
16987
16988 if (!callbacks) {
16989 return;
16990 } // Slicing the arguments on every invocation of this method
16991 // can add a significant amount of overhead. Avoid the
16992 // intermediate object creation for the common case of a
16993 // single callback argument
16994
16995
16996 if (arguments.length === 2) {
16997 length = callbacks.length;
16998
16999 for (i = 0; i < length; ++i) {
17000 callbacks[i].call(this, arguments[1]);
17001 }
17002 } else {
17003 args = [];
17005
17006 for (i = 1; i < arguments.length; ++i) {
17007 args.push(arguments[i]);
17008 }
17009
17010 length = callbacks.length;
17011
17012 for (i = 0; i < length; ++i) {
17013 callbacks[i].apply(this, args);
17014 }
17015 }
17016 };
17017 /**
17018 * Destroys the stream and cleans up.
17019 */
17020
17021
17022 this.dispose = function () {
17023 listeners = {};
17024 };
17025 };
17026 };
17027 /**
17028 * Forwards all `data` events on this stream to the destination stream. The
17029 * destination stream should provide a method `push` to receive the data
17030 * events as they arrive.
17031 * @param destination {stream} the stream that will receive all `data` events
17032 * @param autoFlush {boolean} if false, we will not call `flush` on the destination
17033 * when the current stream emits a 'done' event
17034 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
17035 */
17036
17037
17038 Stream.prototype.pipe = function (destination) {
17039 this.on('data', function (data) {
17040 destination.push(data);
17041 });
17042 this.on('done', function (flushSource) {
17043 destination.flush(flushSource);
17044 });
17045 this.on('partialdone', function (flushSource) {
17046 destination.partialFlush(flushSource);
17047 });
17048 this.on('endedtimeline', function (flushSource) {
17049 destination.endTimeline(flushSource);
17050 });
17051 this.on('reset', function (flushSource) {
17052 destination.reset(flushSource);
17053 });
17054 return destination;
17055 }; // Default stream functions that are expected to be overridden to perform
17056 // actual work. These are provided by the prototype as a sort of no-op
17057 // implementation so that we don't have to check for their existence in the
17058 // `pipe` function above.
17059
17060
17061 Stream.prototype.push = function (data) {
17062 this.trigger('data', data);
17063 };
17064
17065 Stream.prototype.flush = function (flushSource) {
17066 this.trigger('done', flushSource);
17067 };
17068
17069 Stream.prototype.partialFlush = function (flushSource) {
17070 this.trigger('partialdone', flushSource);
17071 };
17072
17073 Stream.prototype.endTimeline = function (flushSource) {
17074 this.trigger('endedtimeline', flushSource);
17075 };
17076
17077 Stream.prototype.reset = function (flushSource) {
17078 this.trigger('reset', flushSource);
17079 };
17080
17081 var stream = Stream;
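  // Illustrative usage sketch: concrete streams assign `new stream()` to their
  // prototype and call init() in their constructor (the same pattern used
  // throughout this file), then communicate via on/trigger/pipe:
  //
  //   var Doubler = function () {
  //     Doubler.prototype.init.call(this);
  //     this.push = function (data) {
  //       this.trigger('data', data * 2);
  //     };
  //   };
  //   Doubler.prototype = new stream();
  //
  //   var source = new Doubler();
  //   source.on('data', function (value) {
  //     console.log(value);
  //   });
  //   source.push(21); // logs 42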
17082
17083 var MAX_TS = 8589934592;
17084 var RO_THRESH = 4294967296;
17085 var TYPE_SHARED = 'shared';
17086
17087 var handleRollover$1 = function handleRollover(value, reference) {
17088 var direction = 1;
17089
17090 if (value > reference) {
17091 // If the current timestamp value is greater than our reference timestamp and we detect a
17092 // timestamp rollover, this means the roll over is happening in the opposite direction.
17093 // Example scenario: Enter a long stream/video just after a rollover occurred. The reference
17094 // point will be set to a small number, e.g. 1. The user then seeks backwards over the
17095 // rollover point. In loading this segment, the timestamp values will be very large,
17096 // e.g. 2^33 - 1. Since this comes before the data we loaded previously, we want to adjust
17097 // the time stamp to be `value - 2^33`.
17098 direction = -1;
17099 } // Note: A seek forwards or back that is greater than the RO_THRESH (2^32, ~13 hours) will
17100 // cause an incorrect adjustment.
17101
17102
17103 while (Math.abs(reference - value) > RO_THRESH) {
17104 value += direction * MAX_TS;
17105 }
17106
17107 return value;
17108 };
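  // Worked example (illustrative): MAX_TS is 2^33, the width of the 90kHz
  // PTS/DTS field, and RO_THRESH is 2^32. If the reference timestamp is
  // 8589930000 (just below 2^33) and a new value arrives as 1000 (just after
  // the clock rolled over), the gap exceeds RO_THRESH and one MAX_TS is added:
  //
  //   handleRollover$1(1000, 8589930000); // => 8589935592 (1000 + 2^33)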
17109
17110 var TimestampRolloverStream = function TimestampRolloverStream(type) {
17111 var lastDTS, referenceDTS;
17112 TimestampRolloverStream.prototype.init.call(this); // The "shared" type is used in cases where a stream will contain muxed
17113 // video and audio. We could use `undefined` here, but having a string
17114 // makes debugging a little clearer.
17115
17116 this.type_ = type || TYPE_SHARED;
17117
17118 this.push = function (data) {
17119 // Any "shared" rollover streams will accept _all_ data. Otherwise,
17120 // streams will only accept data that matches their type.
17121 if (this.type_ !== TYPE_SHARED && data.type !== this.type_) {
17122 return;
17123 }
17124
17125 if (referenceDTS === undefined) {
17126 referenceDTS = data.dts;
17127 }
17128
17129 data.dts = handleRollover$1(data.dts, referenceDTS);
17130 data.pts = handleRollover$1(data.pts, referenceDTS);
17131 lastDTS = data.dts;
17132 this.trigger('data', data);
17133 };
17134
17135 this.flush = function () {
17136 referenceDTS = lastDTS;
17137 this.trigger('done');
17138 };
17139
17140 this.endTimeline = function () {
17141 this.flush();
17142 this.trigger('endedtimeline');
17143 };
17144
17145 this.discontinuity = function () {
17146 referenceDTS = void 0;
17147 lastDTS = void 0;
17148 };
17149
17150 this.reset = function () {
17151 this.discontinuity();
17152 this.trigger('reset');
17153 };
17154 };
17155
17156 TimestampRolloverStream.prototype = new stream();
17157 var timestampRolloverStream = {
17158 TimestampRolloverStream: TimestampRolloverStream,
17159 handleRollover: handleRollover$1
17160 };
17161
17162 var parsePid = function parsePid(packet) {
17163 var pid = packet[1] & 0x1f;
17164 pid <<= 8;
17165 pid |= packet[2];
17166 return pid;
17167 };
17168
17169 var parsePayloadUnitStartIndicator = function parsePayloadUnitStartIndicator(packet) {
17170 return !!(packet[1] & 0x40);
17171 };
17172
17173 var parseAdaptionField = function parseAdaptionField(packet) {
17174 var offset = 0; // if an adaptation field is present, its length is specified by the
17175 // fifth byte of the TS packet header. The adaptation field is
17176 // used to add stuffing to PES packets that don't fill a complete
17177 // TS packet, and to specify some forms of timing and control data
17178 // that we do not currently use.
17179
17180 if ((packet[3] & 0x30) >>> 4 > 0x01) {
17181 offset += packet[4] + 1;
17182 }
17183
17184 return offset;
17185 };
17186
17187 var parseType$2 = function parseType(packet, pmtPid) {
17188 var pid = parsePid(packet);
17189
17190 if (pid === 0) {
17191 return 'pat';
17192 } else if (pid === pmtPid) {
17193 return 'pmt';
17194 } else if (pmtPid) {
17195 return 'pes';
17196 }
17197
17198 return null;
17199 };
17200
17201 var parsePat = function parsePat(packet) {
17202 var pusi = parsePayloadUnitStartIndicator(packet);
17203 var offset = 4 + parseAdaptionField(packet);
17204
17205 if (pusi) {
17206 offset += packet[offset] + 1;
17207 }
17208
17209 return (packet[offset + 10] & 0x1f) << 8 | packet[offset + 11];
17210 };
17211
17212 var parsePmt = function parsePmt(packet) {
17213 var programMapTable = {};
17214 var pusi = parsePayloadUnitStartIndicator(packet);
17215 var payloadOffset = 4 + parseAdaptionField(packet);
17216
17217 if (pusi) {
17218 payloadOffset += packet[payloadOffset] + 1;
17219 } // PMTs can be sent ahead of the time when they should actually
17220 // take effect. We don't believe this should ever be the case
17221 // for HLS but we'll ignore "forward" PMT declarations if we see
17222 // them. Future PMT declarations have the current_next_indicator
17223 // set to zero.
17224
17225
17226 if (!(packet[payloadOffset + 5] & 0x01)) {
17227 return;
17228 }
17229
17230 var sectionLength, tableEnd, programInfoLength; // the mapping table ends at the end of the current section
17231
17232 sectionLength = (packet[payloadOffset + 1] & 0x0f) << 8 | packet[payloadOffset + 2];
17233 tableEnd = 3 + sectionLength - 4; // to determine where the table is, we have to figure out how
17234 // long the program info descriptors are
17235
17236 programInfoLength = (packet[payloadOffset + 10] & 0x0f) << 8 | packet[payloadOffset + 11]; // advance the offset to the first entry in the mapping table
17237
17238 var offset = 12 + programInfoLength;
17239
17240 while (offset < tableEnd) {
17241 var i = payloadOffset + offset; // add an entry that maps the elementary_pid to the stream_type
17242
17243 programMapTable[(packet[i + 1] & 0x1F) << 8 | packet[i + 2]] = packet[i]; // move to the next table entry
17244 // skip past the elementary stream descriptors, if present
17245
17246 offset += ((packet[i + 3] & 0x0F) << 8 | packet[i + 4]) + 5;
17247 }
17248
17249 return programMapTable;
17250 };
17251
17252 var parsePesType = function parsePesType(packet, programMapTable) {
17253 var pid = parsePid(packet);
17254 var type = programMapTable[pid];
17255
17256 switch (type) {
17257 case streamTypes.H264_STREAM_TYPE:
17258 return 'video';
17259
17260 case streamTypes.ADTS_STREAM_TYPE:
17261 return 'audio';
17262
17263 case streamTypes.METADATA_STREAM_TYPE:
17264 return 'timed-metadata';
17265
17266 default:
17267 return null;
17268 }
17269 };
17270
17271 var parsePesTime = function parsePesTime(packet) {
17272 var pusi = parsePayloadUnitStartIndicator(packet);
17273
17274 if (!pusi) {
17275 return null;
17276 }
17277
17278 var offset = 4 + parseAdaptionField(packet);
17279
17280 if (offset >= packet.byteLength) {
17281 // From the H 222.0 MPEG-TS spec
17282 // "For transport stream packets carrying PES packets, stuffing is needed when there
17283 // is insufficient PES packet data to completely fill the transport stream packet
17284 // payload bytes. Stuffing is accomplished by defining an adaptation field longer than
17285 // the sum of the lengths of the data elements in it, so that the payload bytes
17286 // remaining after the adaptation field exactly accommodates the available PES packet
17287 // data."
17288 //
17289 // If the offset is >= the length of the packet, then the packet contains no data
17290 // and instead is just adaptation field stuffing bytes
17291 return null;
17292 }
17293
17294 var pes = null;
17295 var ptsDtsFlags; // PES packets may be annotated with a PTS value, or a PTS value
17296 // and a DTS value. Determine what combination of values is
17297 // available to work with.
17298
17299 ptsDtsFlags = packet[offset + 7]; // PTS and DTS are normally stored as a 33-bit number. JavaScript
17300 // performs all bitwise operations on 32-bit integers but JavaScript
17301 // supports a much greater range (52 bits) of integers using standard
17302 // mathematical operations.
17303 // We construct a 31-bit value using bitwise operators over the 31
17304 // most significant bits and then multiply by 4 (equal to a left-shift
17305 // of 2) before we add the final 2 least significant bits of the
17306 // timestamp (equal to an OR.)
17307
17308 if (ptsDtsFlags & 0xC0) {
17309 pes = {}; // the PTS and DTS are not written out directly. For information
17310 // on how they are encoded, see
17311 // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
17312
17313 pes.pts = (packet[offset + 9] & 0x0E) << 27 | (packet[offset + 10] & 0xFF) << 20 | (packet[offset + 11] & 0xFE) << 12 | (packet[offset + 12] & 0xFF) << 5 | (packet[offset + 13] & 0xFE) >>> 3;
17314 pes.pts *= 4; // Left shift by 2
17315
17316 pes.pts += (packet[offset + 13] & 0x06) >>> 1; // OR by the two LSBs
17317
17318 pes.dts = pes.pts;
17319
17320 if (ptsDtsFlags & 0x40) {
17321 pes.dts = (packet[offset + 14] & 0x0E) << 27 | (packet[offset + 15] & 0xFF) << 20 | (packet[offset + 16] & 0xFE) << 12 | (packet[offset + 17] & 0xFF) << 5 | (packet[offset + 18] & 0xFE) >>> 3;
17322 pes.dts *= 4; // Left shift by 2
17323
17324 pes.dts += (packet[offset + 18] & 0x06) >>> 1; // OR by the two LSBs
17325 }
17326 }
17327
17328 return pes;
17329 };
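  // Worked example (illustrative): a one-second PTS (90000 ticks of the 90kHz
  // clock) spread across the five header bytes 0x21 0x00 0x05 0xBF 0x21. The
  // 31-most-significant-bits expression yields 22500, multiplying by 4 gives
  // 90000, and the two least significant bits (zero here) are OR'd in:
  //
  //   ((0x21 & 0x0E) << 27 | (0x00 & 0xFF) << 20 | (0x05 & 0xFE) << 12 |
  //    (0xBF & 0xFF) << 5 | (0x21 & 0xFE) >>> 3) * 4 +
  //    ((0x21 & 0x06) >>> 1); // => 90000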
17330
17331 var parseNalUnitType = function parseNalUnitType(type) {
17332 switch (type) {
17333 case 0x05:
17334 return 'slice_layer_without_partitioning_rbsp_idr';
17335
17336 case 0x06:
17337 return 'sei_rbsp';
17338
17339 case 0x07:
17340 return 'seq_parameter_set_rbsp';
17341
17342 case 0x08:
17343 return 'pic_parameter_set_rbsp';
17344
17345 case 0x09:
17346 return 'access_unit_delimiter_rbsp';
17347
17348 default:
17349 return null;
17350 }
17351 };
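  // Illustrative sketch (hypothetical helper, never invoked): the NAL unit
  // type is the low five bits of the byte that follows a start code, so the
  // keyframe test used below reduces to a mask and a table lookup.
  // `nalHeaderByte` is a hypothetical byte from an H.264 stream.
  var exampleIsIdrNal = function (nalHeaderByte) {
    return parseNalUnitType(nalHeaderByte & 0x1f) === 'slice_layer_without_partitioning_rbsp_idr';
  };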
17352
17353 var videoPacketContainsKeyFrame = function videoPacketContainsKeyFrame(packet) {
17354 var offset = 4 + parseAdaptionField(packet);
17355 var frameBuffer = packet.subarray(offset);
17356 var frameI = 0;
17357 var frameSyncPoint = 0;
17358 var foundKeyFrame = false;
17359 var nalType; // advance the sync point to a NAL start, if necessary
17360
17361 for (; frameSyncPoint < frameBuffer.byteLength - 3; frameSyncPoint++) {
17362 if (frameBuffer[frameSyncPoint + 2] === 1) {
17363 // the sync point is properly aligned
17364 frameI = frameSyncPoint + 5;
17365 break;
17366 }
17367 }
17368
17369 while (frameI < frameBuffer.byteLength) {
17370 // look at the current byte to determine if we've hit the end of
17371 // a NAL unit boundary
17372 switch (frameBuffer[frameI]) {
17373 case 0:
17374 // skip past non-sync sequences
17375 if (frameBuffer[frameI - 1] !== 0) {
17376 frameI += 2;
17377 break;
17378 } else if (frameBuffer[frameI - 2] !== 0) {
17379 frameI++;
17380 break;
17381 }
17382
17383 if (frameSyncPoint + 3 !== frameI - 2) {
17384 nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
17385
17386 if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
17387 foundKeyFrame = true;
17388 }
17389 } // drop trailing zeroes
17390
17391
17392 do {
17393 frameI++;
17394 } while (frameBuffer[frameI] !== 1 && frameI < frameBuffer.length);
17395
17396 frameSyncPoint = frameI - 2;
17397 frameI += 3;
17398 break;
17399
17400 case 1:
17401 // skip past non-sync sequences
17402 if (frameBuffer[frameI - 1] !== 0 || frameBuffer[frameI - 2] !== 0) {
17403 frameI += 3;
17404 break;
17405 }
17406
17407 nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
17408
17409 if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
17410 foundKeyFrame = true;
17411 }
17412
17413 frameSyncPoint = frameI - 2;
17414 frameI += 3;
17415 break;
17416
17417 default:
17418 // the current byte isn't a one or zero, so it cannot be part
17419 // of a sync sequence
17420 frameI += 3;
17421 break;
17422 }
17423 }
17424
17425 frameBuffer = frameBuffer.subarray(frameSyncPoint);
17426 frameI -= frameSyncPoint;
17427 frameSyncPoint = 0; // parse the final nal
17428
17429 if (frameBuffer && frameBuffer.byteLength > 3) {
17430 nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
17431
17432 if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
17433 foundKeyFrame = true;
17434 }
17435 }
17436
17437 return foundKeyFrame;
17438 };
17439
17440 var probe$2 = {
17441 parseType: parseType$2,
17442 parsePat: parsePat,
17443 parsePmt: parsePmt,
17444 parsePayloadUnitStartIndicator: parsePayloadUnitStartIndicator,
17445 parsePesType: parsePesType,
17446 parsePesTime: parsePesTime,
17447 videoPacketContainsKeyFrame: videoPacketContainsKeyFrame
17448 };
17449
17450 /**
17451 * mux.js
17452 *
17453 * Copyright (c) Brightcove
17454 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
17455 *
17456 * Utilities to detect basic properties and metadata about AAC data.
17457 */
17458
17459 var ADTS_SAMPLING_FREQUENCIES = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
17460
17461 var parseId3TagSize = function parseId3TagSize(header, byteIndex) {
17462 var returnSize = header[byteIndex + 6] << 21 | header[byteIndex + 7] << 14 | header[byteIndex + 8] << 7 | header[byteIndex + 9],
17463 flags = header[byteIndex + 5],
17464 footerPresent = (flags & 16) >> 4; // if we get a negative returnSize clamp it to 0
17465
17466 returnSize = returnSize >= 0 ? returnSize : 0;
17467
17468 if (footerPresent) {
17469 return returnSize + 20;
17470 }
17471
17472 return returnSize + 10;
17473 };
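  // Illustrative sketch (hypothetical helper, never invoked) of the
  // "syncsafe" integers used above: ID3 sizes carry seven payload bits per
  // byte (the high bit stays 0), so four size bytes are recombined with 7-bit
  // shifts. For example, [0x00, 0x00, 0x02, 0x01] decodes to
  // (2 << 7) | 1 === 257 bytes.
  var exampleSyncSafeSize = function (b0, b1, b2, b3) {
    return b0 << 21 | b1 << 14 | b2 << 7 | b3;
  };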
17474
17475 var getId3Offset = function getId3Offset(data, offset) {
17476 if (data.length - offset < 10 || data[offset] !== 'I'.charCodeAt(0) || data[offset + 1] !== 'D'.charCodeAt(0) || data[offset + 2] !== '3'.charCodeAt(0)) {
17477 return offset;
17478 }
17479
17480 offset += parseId3TagSize(data, offset);
17481 return getId3Offset(data, offset);
17482 }; // TODO: use vhs-utils
17483
17484
17485 var isLikelyAacData = function isLikelyAacData(data) {
17486 var offset = getId3Offset(data, 0);
17487 return data.length >= offset + 2 && (data[offset] & 0xFF) === 0xFF && (data[offset + 1] & 0xF0) === 0xF0 && // verify that the 2 layer bits are 0, aka this
17488 // is not mp3 data but aac data.
17489 (data[offset + 1] & 0x16) === 0x10;
17490 };
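  // Illustrative sketch (hypothetical helper, never invoked): an ADTS frame
  // begins with a twelve-bit sync word of all ones (0xFFF), which is what the
  // header test above looks for once any leading ID3 tags are skipped.
  // `bytes` and `i` are hypothetical.
  var exampleLooksLikeAdts = function (bytes, i) {
    return (bytes[i] & 0xFF) === 0xFF && (bytes[i + 1] & 0xF0) === 0xF0;
  };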
17491
17492 var parseSyncSafeInteger = function parseSyncSafeInteger(data) {
17493 return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
17494 }; // return a percent-encoded representation of the specified byte range
17495 // @see http://en.wikipedia.org/wiki/Percent-encoding
17496
17497
17498 var percentEncode = function percentEncode(bytes, start, end) {
17499 var i,
17500 result = '';
17501
17502 for (i = start; i < end; i++) {
17503 result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
17504 }
17505
17506 return result;
17507 }; // return the string representation of the specified byte range,
17508 // interpreted as ISO-8859-1.
17509
17510
17511 var parseIso88591 = function parseIso88591(bytes, start, end) {
17512 return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
17513 };
17514
17515 var parseAdtsSize = function parseAdtsSize(header, byteIndex) {
17516 var lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
17517 middle = header[byteIndex + 4] << 3,
17518 highTwo = (header[byteIndex + 3] & 0x3) << 11; // mask the low two bits before shifting
17519 return highTwo | middle | lowThree;
17520 };
17521
17522 var parseType$1 = function parseType(header, byteIndex) {
17523 if (header[byteIndex] === 'I'.charCodeAt(0) && header[byteIndex + 1] === 'D'.charCodeAt(0) && header[byteIndex + 2] === '3'.charCodeAt(0)) {
17524 return 'timed-metadata';
17525 } else if ((header[byteIndex] & 0xff) === 0xff && (header[byteIndex + 1] & 0xf0) === 0xf0) {
17526 return 'audio';
17527 }
17528
17529 return null;
17530 };
17531
17532 var parseSampleRate = function parseSampleRate(packet) {
17533 var i = 0;
17534
17535 while (i + 5 < packet.length) {
17536 if (packet[i] !== 0xFF || (packet[i + 1] & 0xF6) !== 0xF0) {
17537 // If a valid header was not found, jump one forward and attempt to
17538 // find a valid ADTS header starting at the next byte
17539 i++;
17540 continue;
17541 }
17542
17543 return ADTS_SAMPLING_FREQUENCIES[(packet[i + 2] & 0x3c) >>> 2];
17544 }
17545
17546 return null;
17547 };
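  // Illustrative sketch (hypothetical helper, never invoked): the ADTS
  // sampling_frequency_index sits in bits 2-5 of the third header byte and
  // indexes the frequency table above; index 4, for instance, maps to 44100 Hz.
  var exampleSampleRate = function (thirdHeaderByte) {
    return ADTS_SAMPLING_FREQUENCIES[(thirdHeaderByte & 0x3c) >>> 2];
  };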
17548
17549 var parseAacTimestamp = function parseAacTimestamp(packet) {
17550 var frameStart, frameSize, frame, frameHeader; // find the start of the first frame and the end of the tag
17551
17552 frameStart = 10;
17553
17554 if (packet[5] & 0x40) {
17555 // advance the frame start past the extended header
17556 frameStart += 4; // header size field
17557
17558 frameStart += parseSyncSafeInteger(packet.subarray(10, 14));
17559 } // parse one or more ID3 frames
17560 // http://id3.org/id3v2.3.0#ID3v2_frame_overview
17561
17562
17563 do {
17564 // determine the number of bytes in this frame
17565 frameSize = parseSyncSafeInteger(packet.subarray(frameStart + 4, frameStart + 8));
17566
17567 if (frameSize < 1) {
17568 return null;
17569 }
17570
17571 frameHeader = String.fromCharCode(packet[frameStart], packet[frameStart + 1], packet[frameStart + 2], packet[frameStart + 3]);
17572
17573 if (frameHeader === 'PRIV') {
17574 frame = packet.subarray(frameStart + 10, frameStart + frameSize + 10);
17575
17576 for (var i = 0; i < frame.byteLength; i++) {
17577 if (frame[i] === 0) {
17578 var owner = parseIso88591(frame, 0, i);
17579
17580 if (owner === 'com.apple.streaming.transportStreamTimestamp') {
17581 var d = frame.subarray(i + 1);
17582 var size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
17583 size *= 4;
17584 size += d[7] & 0x03;
17585 return size;
17586 }
17587
17588 break;
17589 }
17590 }
17591 }
17592
17593 frameStart += 10; // advance past the frame header
17594
17595 frameStart += frameSize; // advance past the frame body
17596 } while (frameStart < packet.byteLength);
17597
17598 return null;
17599 };
17600
17601 var utils = {
17602 isLikelyAacData: isLikelyAacData,
17603 parseId3TagSize: parseId3TagSize,
17604 parseAdtsSize: parseAdtsSize,
17605 parseType: parseType$1,
17606 parseSampleRate: parseSampleRate,
17607 parseAacTimestamp: parseAacTimestamp
17608 };
17609
17610 /**
17611 * mux.js
17612 *
17613 * Copyright (c) Brightcove
17614 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
17615 */
17616 var ONE_SECOND_IN_TS$1 = 90000,
17617 // 90kHz clock
17618 secondsToVideoTs,
17619 secondsToAudioTs,
17620 videoTsToSeconds,
17621 audioTsToSeconds,
17622 audioTsToVideoTs,
17623 videoTsToAudioTs,
17624 metadataTsToSeconds;
17625
17626 secondsToVideoTs = function secondsToVideoTs(seconds) {
17627 return seconds * ONE_SECOND_IN_TS$1;
17628 };
17629
17630 secondsToAudioTs = function secondsToAudioTs(seconds, sampleRate) {
17631 return seconds * sampleRate;
17632 };
17633
17634 videoTsToSeconds = function videoTsToSeconds(timestamp) {
17635 return timestamp / ONE_SECOND_IN_TS$1;
17636 };
17637
17638 audioTsToSeconds = function audioTsToSeconds(timestamp, sampleRate) {
17639 return timestamp / sampleRate;
17640 };
17641
17642 audioTsToVideoTs = function audioTsToVideoTs(timestamp, sampleRate) {
17643 return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
17644 };
17645
17646 videoTsToAudioTs = function videoTsToAudioTs(timestamp, sampleRate) {
17647 return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
17648 };
17649 /**
17650 * Adjust ID3 tag or caption timing information by the timeline pts values
17651 * (if keepOriginalTimestamps is false) and convert to seconds
17652 */
17653
17654
17655 metadataTsToSeconds = function metadataTsToSeconds(timestamp, timelineStartPts, keepOriginalTimestamps) {
17656 return videoTsToSeconds(keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts);
17657 };
17658
17659 var clock = {
17660 ONE_SECOND_IN_TS: ONE_SECOND_IN_TS$1,
17661 secondsToVideoTs: secondsToVideoTs,
17662 secondsToAudioTs: secondsToAudioTs,
17663 videoTsToSeconds: videoTsToSeconds,
17664 audioTsToSeconds: audioTsToSeconds,
17665 audioTsToVideoTs: audioTsToVideoTs,
17666 videoTsToAudioTs: videoTsToAudioTs,
17667 metadataTsToSeconds: metadataTsToSeconds
17668 };
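  // Illustrative usage sketch (hypothetical helper, never invoked) for the
  // clock helpers above: ten seconds of media is 900000 ticks on the 90kHz
  // video clock and 441000 samples at 44.1kHz audio.
  var exampleClockRoundTrip = function () {
    var videoTs = clock.secondsToVideoTs(10); // 900000
    var audioTs = clock.secondsToAudioTs(10, 44100); // 441000
    return clock.videoTsToSeconds(videoTs) === clock.audioTsToSeconds(audioTs, 44100); // true
  };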
17669
17670 var handleRollover = timestampRolloverStream.handleRollover;
17671 var probe$1 = {};
17672 probe$1.ts = probe$2;
17673 probe$1.aac = utils;
17674 var ONE_SECOND_IN_TS = clock.ONE_SECOND_IN_TS;
17675 var MP2T_PACKET_LENGTH = 188,
17676 // bytes
17677 SYNC_BYTE = 0x47;
17678 /**
17679 * walks through segment data looking for pat and pmt packets to parse out
17680 * program map table information
17681 */
17682
17683 var parsePsi_ = function parsePsi_(bytes, pmt) {
17684 var startIndex = 0,
17685 endIndex = MP2T_PACKET_LENGTH,
17686 packet,
17687 type;
17688
17689 while (endIndex < bytes.byteLength) {
17690 // Look for a pair of start and end sync bytes in the data.
17691 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
17692 // We found a packet
17693 packet = bytes.subarray(startIndex, endIndex);
17694 type = probe$1.ts.parseType(packet, pmt.pid);
17695
17696 switch (type) {
17697 case 'pat':
17698 if (!pmt.pid) {
17699 pmt.pid = probe$1.ts.parsePat(packet);
17700 }
17701
17702 break;
17703
17704 case 'pmt':
17705 if (!pmt.table) {
17706 pmt.table = probe$1.ts.parsePmt(packet);
17707 }
17708
17709 break;
17710 } // Found the pat and pmt, we can stop walking the segment
17711
17712
17713 if (pmt.pid && pmt.table) {
17714 return;
17715 }
17716
17717 startIndex += MP2T_PACKET_LENGTH;
17718 endIndex += MP2T_PACKET_LENGTH;
17719 continue;
17720 } // If we get here, we have somehow become de-synchronized and we need to step
17721 // forward one byte at a time until we find a pair of sync bytes that denote
17722 // a packet
17723
17724
17725 startIndex++;
17726 endIndex++;
17727 }
17728 };
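  // Illustrative sketch (hypothetical helper, never invoked) of the resync
  // test used by all the walkers here: a position is treated as packet-aligned
  // when sync bytes appear exactly one 188-byte packet apart.
  var examplePacketAligned = function (bytes, i) {
    return bytes[i] === SYNC_BYTE && bytes[i + MP2T_PACKET_LENGTH] === SYNC_BYTE;
  };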
17729 /**
17730 * walks through the segment data from the start and end to get timing information
17731 * for the first and last audio pes packets
17732 */
17733
17734
17735 var parseAudioPes_ = function parseAudioPes_(bytes, pmt, result) {
17736 var startIndex = 0,
17737 endIndex = MP2T_PACKET_LENGTH,
17738 packet,
17739 type,
17740 pesType,
17741 pusi,
17742 parsed;
17743 var endLoop = false; // Start walking from start of segment to get first audio packet
17744
17745 while (endIndex <= bytes.byteLength) {
17746 // Look for a pair of start and end sync bytes in the data.
17747 if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
17748 // We found a packet
17749 packet = bytes.subarray(startIndex, endIndex);
17750 type = probe$1.ts.parseType(packet, pmt.pid);
17751
17752 switch (type) {
17753 case 'pes':
17754 pesType = probe$1.ts.parsePesType(packet, pmt.table);
17755 pusi = probe$1.ts.parsePayloadUnitStartIndicator(packet);
17756
17757 if (pesType === 'audio' && pusi) {
17758 parsed = probe$1.ts.parsePesTime(packet);
17759
17760 if (parsed) {
17761 parsed.type = 'audio';
17762 result.audio.push(parsed);
17763 endLoop = true;
17764 }
17765 }
17766
17767 break;
17768 }
17769
17770 if (endLoop) {
17771 break;
17772 }
17773
17774 startIndex += MP2T_PACKET_LENGTH;
17775 endIndex += MP2T_PACKET_LENGTH;
17776 continue;
17777 } // If we get here, we have somehow become de-synchronized and we need to step
17778 // forward one byte at a time until we find a pair of sync bytes that denote
17779 // a packet
17780
17781
17782 startIndex++;
17783 endIndex++;
17784 } // Start walking from end of segment to get last audio packet
17785
17786
17787 endIndex = bytes.byteLength;
17788 startIndex = endIndex - MP2T_PACKET_LENGTH;
17789 endLoop = false;
17790
17791 while (startIndex >= 0) {
17792 // Look for a pair of start and end sync bytes in the data.
17793 if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
17794 // We found a packet
17795 packet = bytes.subarray(startIndex, endIndex);
17796 type = probe$1.ts.parseType(packet, pmt.pid);
17797
17798 switch (type) {
17799 case 'pes':
17800 pesType = probe$1.ts.parsePesType(packet, pmt.table);
17801 pusi = probe$1.ts.parsePayloadUnitStartIndicator(packet);
17802
17803 if (pesType === 'audio' && pusi) {
17804 parsed = probe$1.ts.parsePesTime(packet);
17805
17806 if (parsed) {
17807 parsed.type = 'audio';
17808 result.audio.push(parsed);
17809 endLoop = true;
17810 }
17811 }
17812
17813 break;
17814 }
17815
17816 if (endLoop) {
17817 break;
17818 }
17819
17820 startIndex -= MP2T_PACKET_LENGTH;
17821 endIndex -= MP2T_PACKET_LENGTH;
17822 continue;
17823 } // If we get here, we have somehow become de-synchronized and we need to step
17824 // backward one byte at a time until we find a pair of sync bytes that denote
17825 // a packet
17826
17827
17828 startIndex--;
17829 endIndex--;
17830 }
17831 };
17832 /**
17833 * walks through the segment data from the start and end to get timing information
17834 * for the first and last video pes packets as well as timing information for the first
17835 * key frame.
17836 */
17837
17838
17839 var parseVideoPes_ = function parseVideoPes_(bytes, pmt, result) {
17840 var startIndex = 0,
17841 endIndex = MP2T_PACKET_LENGTH,
17842 packet,
17843 type,
17844 pesType,
17845 pusi,
17846 parsed,
17847 frame,
17848 i,
17849 pes;
17850 var endLoop = false;
17851 var currentFrame = {
17852 data: [],
17853 size: 0
17854 }; // Start walking from start of segment to get first video packet
17855
17856 while (endIndex < bytes.byteLength) {
17857 // Look for a pair of start and end sync bytes in the data.
17858 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
17859 // We found a packet
17860 packet = bytes.subarray(startIndex, endIndex);
17861 type = probe$1.ts.parseType(packet, pmt.pid);
17862
17863 switch (type) {
17864 case 'pes':
17865 pesType = probe$1.ts.parsePesType(packet, pmt.table);
17866 pusi = probe$1.ts.parsePayloadUnitStartIndicator(packet);
17867
17868 if (pesType === 'video') {
17869 if (pusi && !endLoop) {
17870 parsed = probe$1.ts.parsePesTime(packet);
17871
17872 if (parsed) {
17873 parsed.type = 'video';
17874 result.video.push(parsed);
17875 endLoop = true;
17876 }
17877 }
17878
17879 if (!result.firstKeyFrame) {
17880 if (pusi) {
17881 if (currentFrame.size !== 0) {
17882 frame = new Uint8Array(currentFrame.size);
17883 i = 0;
17884
17885 while (currentFrame.data.length) {
17886 pes = currentFrame.data.shift();
17887 frame.set(pes, i);
17888 i += pes.byteLength;
17889 }
17890
17891 if (probe$1.ts.videoPacketContainsKeyFrame(frame)) {
17892 var firstKeyFrame = probe$1.ts.parsePesTime(frame); // PTS/DTS may not be available. Simply *not* setting
17893 // the keyframe seems to work fine with HLS playback
17894 // and is definitely preferable to a crash with a TypeError.
17895
17896 if (firstKeyFrame) {
17897 result.firstKeyFrame = firstKeyFrame;
17898 result.firstKeyFrame.type = 'video';
17899 } else {
17900 // eslint-disable-next-line
17901 console.warn('Failed to extract PTS/DTS from PES at first keyframe. ' + 'This could be an unusual TS segment, or else mux.js did not ' + 'parse your TS segment correctly. If you know your TS ' + 'segments do contain PTS/DTS on keyframes please file a bug ' + 'report! You can try ffprobe to double check for yourself.');
17902 }
17903 }
17904
17905 currentFrame.size = 0;
17906 }
17907 }
17908
17909 currentFrame.data.push(packet);
17910 currentFrame.size += packet.byteLength;
17911 }
17912 }
17913
17914 break;
17915 }
17916
17917 if (endLoop && result.firstKeyFrame) {
17918 break;
17919 }
17920
17921 startIndex += MP2T_PACKET_LENGTH;
17922 endIndex += MP2T_PACKET_LENGTH;
17923 continue;
17924 } // If we get here, we have somehow become de-synchronized and we need to step
17925 // forward one byte at a time until we find a pair of sync bytes that denote
17926 // a packet
17927
17928
17929 startIndex++;
17930 endIndex++;
17931 } // Start walking from end of segment to get last video packet
17932
17933
17934 endIndex = bytes.byteLength;
17935 startIndex = endIndex - MP2T_PACKET_LENGTH;
17936 endLoop = false;
17937
17938 while (startIndex >= 0) {
17939 // Look for a pair of start and end sync bytes in the data.
17940 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
17941 // We found a packet
17942 packet = bytes.subarray(startIndex, endIndex);
17943 type = probe$1.ts.parseType(packet, pmt.pid);
17944
17945 switch (type) {
17946 case 'pes':
17947 pesType = probe$1.ts.parsePesType(packet, pmt.table);
17948 pusi = probe$1.ts.parsePayloadUnitStartIndicator(packet);
17949
17950 if (pesType === 'video' && pusi) {
17951 parsed = probe$1.ts.parsePesTime(packet);
17952
17953 if (parsed) {
17954 parsed.type = 'video';
17955 result.video.push(parsed);
17956 endLoop = true;
17957 }
17958 }
17959
17960 break;
17961 }
17962
17963 if (endLoop) {
17964 break;
17965 }
17966
17967 startIndex -= MP2T_PACKET_LENGTH;
17968 endIndex -= MP2T_PACKET_LENGTH;
17969 continue;
17970 } // If we get here, we have somehow become de-synchronized and we need to step
17971 // backward one byte at a time until we find a pair of sync bytes that denote
17972 // a packet
17973
17974
17975 startIndex--;
17976 endIndex--;
17977 }
17978 };
17979 /**
17980 * Adjusts the timestamp information for the segment to account for
17981 * rollover and converts to seconds based on the PES packet timescale (90kHz clock)
17982 */
17983
17984
17985 var adjustTimestamp_ = function adjustTimestamp_(segmentInfo, baseTimestamp) {
17986 if (segmentInfo.audio && segmentInfo.audio.length) {
17987 var audioBaseTimestamp = baseTimestamp;
17988
17989 if (typeof audioBaseTimestamp === 'undefined' || isNaN(audioBaseTimestamp)) {
17990 audioBaseTimestamp = segmentInfo.audio[0].dts;
17991 }
17992
17993 segmentInfo.audio.forEach(function (info) {
17994 info.dts = handleRollover(info.dts, audioBaseTimestamp);
17995 info.pts = handleRollover(info.pts, audioBaseTimestamp); // time in seconds
17996
17997 info.dtsTime = info.dts / ONE_SECOND_IN_TS;
17998 info.ptsTime = info.pts / ONE_SECOND_IN_TS;
17999 });
18000 }
18001
18002 if (segmentInfo.video && segmentInfo.video.length) {
18003 var videoBaseTimestamp = baseTimestamp;
18004
18005 if (typeof videoBaseTimestamp === 'undefined' || isNaN(videoBaseTimestamp)) {
18006 videoBaseTimestamp = segmentInfo.video[0].dts;
18007 }
18008
18009 segmentInfo.video.forEach(function (info) {
18010 info.dts = handleRollover(info.dts, videoBaseTimestamp);
18011 info.pts = handleRollover(info.pts, videoBaseTimestamp); // time in seconds
18012
18013 info.dtsTime = info.dts / ONE_SECOND_IN_TS;
18014 info.ptsTime = info.pts / ONE_SECOND_IN_TS;
18015 });
18016
18017 if (segmentInfo.firstKeyFrame) {
18018 var frame = segmentInfo.firstKeyFrame;
18019 frame.dts = handleRollover(frame.dts, videoBaseTimestamp);
18020 frame.pts = handleRollover(frame.pts, videoBaseTimestamp); // time in seconds
18021
18022 frame.dtsTime = frame.dts / ONE_SECOND_IN_TS;
18023 frame.ptsTime = frame.pts / ONE_SECOND_IN_TS;
18024 }
18025 }
18026 };
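  // Illustrative sketch (hypothetical helper, never invoked): the 33-bit PES
  // clock wraps roughly every 26.5 hours (2^33 / 90000 seconds), so
  // adjustTimestamp_ unwraps each timestamp against a reference before
  // dividing down to seconds, as in:
  var exampleUnwrapToSeconds = function (rawDts, referenceDts) {
    return handleRollover(rawDts, referenceDts) / ONE_SECOND_IN_TS;
  };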
18027 /**
18028 * inspects the aac data stream for start and end time information
18029 */
18030
18031
18032 var inspectAac_ = function inspectAac_(bytes) {
18033 var endLoop = false,
18034 audioCount = 0,
18035 sampleRate = null,
18036 timestamp = null,
18037 frameSize = 0,
18038 byteIndex = 0,
18039 packet;
18040
18041 while (bytes.length - byteIndex >= 3) {
18042 var type = probe$1.aac.parseType(bytes, byteIndex);
18043
18044 switch (type) {
18045 case 'timed-metadata':
18046 // Exit early because we don't have enough to parse
18047 // the ID3 tag header
18048 if (bytes.length - byteIndex < 10) {
18049 endLoop = true;
18050 break;
18051 }
18052
18053 frameSize = probe$1.aac.parseId3TagSize(bytes, byteIndex); // Exit early if we don't have enough in the buffer
18054 // to emit a full packet
18055
18056 if (byteIndex + frameSize > bytes.length) {
18057 endLoop = true;
18058 break;
18059 }
18060
18061 if (timestamp === null) {
18062 packet = bytes.subarray(byteIndex, byteIndex + frameSize);
18063 timestamp = probe$1.aac.parseAacTimestamp(packet);
18064 }
18065
18066 byteIndex += frameSize;
18067 break;
18068
18069 case 'audio':
18070 // Exit early because we don't have enough to parse
18071 // the ADTS frame header
18072 if (bytes.length - byteIndex < 7) {
18073 endLoop = true;
18074 break;
18075 }
18076
18077 frameSize = probe$1.aac.parseAdtsSize(bytes, byteIndex); // Exit early if we don't have enough in the buffer
18078 // to emit a full packet
18079
18080 if (byteIndex + frameSize > bytes.length) {
18081 endLoop = true;
18082 break;
18083 }
18084
18085 if (sampleRate === null) {
18086 packet = bytes.subarray(byteIndex, byteIndex + frameSize);
18087 sampleRate = probe$1.aac.parseSampleRate(packet);
18088 }
18089
18090 audioCount++;
18091 byteIndex += frameSize;
18092 break;
18093
18094 default:
18095 byteIndex++;
18096 break;
18097 }
18098
18099 if (endLoop) {
18100 return null;
18101 }
18102 }
18103
18104 if (sampleRate === null || timestamp === null) {
18105 return null;
18106 }
18107
18108 var audioTimescale = ONE_SECOND_IN_TS / sampleRate;
18109 var result = {
18110 audio: [{
18111 type: 'audio',
18112 dts: timestamp,
18113 pts: timestamp
18114 }, {
18115 type: 'audio',
18116 dts: timestamp + audioCount * 1024 * audioTimescale,
18117 pts: timestamp + audioCount * 1024 * audioTimescale
18118 }]
18119 };
18120 return result;
18121 };
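  // Illustrative arithmetic (hypothetical helper, never invoked) for the
  // end-time estimate above: every AAC frame carries 1024 samples, so one
  // frame at 44100 Hz spans 1024 * (90000 / 44100), roughly 2090 ticks of the
  // 90kHz clock.
  var exampleAacFrameDurationTs = function (sampleRate) {
    return 1024 * (ONE_SECOND_IN_TS / sampleRate);
  };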
18122 /**
18123 * inspects the transport stream segment data for start and end time information
18124 * of the audio and video tracks (when present) as well as the first key frame's
18125 * start time.
18126 */
18127
18128
18129 var inspectTs_ = function inspectTs_(bytes) {
18130 var pmt = {
18131 pid: null,
18132 table: null
18133 };
18134 var result = {};
18135 parsePsi_(bytes, pmt);
18136
18137 for (var pid in pmt.table) {
18138 if (pmt.table.hasOwnProperty(pid)) {
18139 var type = pmt.table[pid];
18140
18141 switch (type) {
18142 case streamTypes.H264_STREAM_TYPE:
18143 result.video = [];
18144 parseVideoPes_(bytes, pmt, result);
18145
18146 if (result.video.length === 0) {
18147 delete result.video;
18148 }
18149
18150 break;
18151
18152 case streamTypes.ADTS_STREAM_TYPE:
18153 result.audio = [];
18154 parseAudioPes_(bytes, pmt, result);
18155
18156 if (result.audio.length === 0) {
18157 delete result.audio;
18158 }
18159
18160 break;
18161 }
18162 }
18163 }
18164
18165 return result;
18166 };
18167 /**
18168 * Inspects segment byte data and returns an object with start and end timing information
18169 *
18170 * @param {Uint8Array} bytes The segment byte data
18171 * @param {Number} baseTimestamp Relative reference timestamp used when adjusting frame
18172 * timestamps for rollover. This value must be expressed in the 90kHz clock.
18173 * @return {Object} Object containing start and end frame timing info of segment.
18174 */
18175
18176
18177 var inspect = function inspect(bytes, baseTimestamp) {
18178 var isAacData = probe$1.aac.isLikelyAacData(bytes);
18179 var result;
18180
18181 if (isAacData) {
18182 result = inspectAac_(bytes);
18183 } else {
18184 result = inspectTs_(bytes);
18185 }
18186
18187 if (!result || !result.audio && !result.video) {
18188 return null;
18189 }
18190
18191 adjustTimestamp_(result, baseTimestamp);
18192 return result;
18193 };
18194
18195 var tsInspector = {
18196 inspect: inspect,
18197 parseAudioPes_: parseAudioPes_
18198 };
18199
18200 /**
18201 * Probe an mpeg2-ts segment to determine the start time of the segment in its
18202 * internal "media time," as well as whether it contains video and/or audio.
18203 *
18204 * @private
18205 * @param {Uint8Array} bytes - segment bytes
18206 * @param {number} baseStartTime
18207 * Relative reference timestamp used when adjusting frame timestamps for rollover.
18208 * This value should be in seconds, as it's converted to the 90kHz clock within the
18209 * function body.
18210 * @return {Object} The start time of the current segment in "media time" as well as
18211 * whether it contains video and/or audio
18212 */
18213
18214 var probeTsSegment = function probeTsSegment(bytes, baseStartTime) {
18215 var tsStartTime = typeof baseStartTime === 'number' && !isNaN(baseStartTime) ? baseStartTime * clock.ONE_SECOND_IN_TS : void 0;
18216 var timeInfo = tsInspector.inspect(bytes, tsStartTime);
18217
18218 if (!timeInfo) {
18219 return null;
18220 }
18221
18222 var result = {
18223 // each type's time info comes back as an array of 2 times, start and end
18224 hasVideo: timeInfo.video && timeInfo.video.length === 2 || false,
18225 hasAudio: timeInfo.audio && timeInfo.audio.length === 2 || false
18226 };
18227
18228 if (result.hasVideo) {
18229 result.videoStart = timeInfo.video[0].ptsTime;
18230 }
18231
18232 if (result.hasAudio) {
18233 result.audioStart = timeInfo.audio[0].ptsTime;
18234 }
18235
18236 return result;
18237 };
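  // Illustrative usage sketch (hypothetical helper, never invoked), assuming
  // `segmentBytes` is a hypothetical Uint8Array of TS data whose timestamps
  // start near 0 seconds of media time:
  var exampleProbeUsage = function (segmentBytes) {
    var info = probeTsSegment(segmentBytes, 0);
    return info && info.hasVideo ? info.videoStart : null; // start time in seconds
  };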
18238 /**
18239 * Combine all segments into a single Uint8Array
18240 *
18241 * @param {Object} segmentObj
18242 * @return {Uint8Array} concatenated bytes
18243 * @private
18244 */
18245
18246 var concatSegments = function concatSegments(segmentObj) {
18247 var offset = 0;
18248 var tempBuffer;
18249
18250 if (segmentObj.bytes) {
18251 tempBuffer = new Uint8Array(segmentObj.bytes); // combine the individual segments into one large typed-array
18252
18253 segmentObj.segments.forEach(function (segment) {
18254 tempBuffer.set(segment, offset);
18255 offset += segment.byteLength;
18256 });
18257 }
18258
18259 return tempBuffer;
18260 };
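  // Illustrative usage sketch (hypothetical helper, never invoked) for
  // concatSegments: `bytes` is the combined byte length and `segments` the
  // ordered chunks to flatten.
  var exampleConcatUsage = function () {
    var a = new Uint8Array([1, 2]);
    var b = new Uint8Array([3]);
    return concatSegments({
      bytes: a.byteLength + b.byteLength,
      segments: [a, b]
    }); // Uint8Array [1, 2, 3]
  };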
18261
18262 /**
18263 * mux.js
18264 *
18265 * Copyright (c) Brightcove
18266 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
18267 */
18268 var toUnsigned$3 = function toUnsigned(value) {
18269 return value >>> 0;
18270 };
18271
18272 var toHexString$1 = function toHexString(value) {
18273 return ('00' + value.toString(16)).slice(-2);
18274 };
18275
18276 var bin = {
18277 toUnsigned: toUnsigned$3,
18278 toHexString: toHexString$1
18279 };
18280
18281 var parseType = function parseType(buffer) {
18282 var result = '';
18283 result += String.fromCharCode(buffer[0]);
18284 result += String.fromCharCode(buffer[1]);
18285 result += String.fromCharCode(buffer[2]);
18286 result += String.fromCharCode(buffer[3]);
18287 return result;
18288 };
18289
18290 var parseType_1 = parseType;
18291
18292 var toUnsigned$2 = bin.toUnsigned;
18293
18294 var findBox = function findBox(data, path) {
18295 var results = [],
18296 i,
18297 size,
18298 type,
18299 end,
18300 subresults;
18301
18302 if (!path.length) {
18303 // short-circuit the search for empty paths
18304 return null;
18305 }
18306
18307 for (i = 0; i < data.byteLength;) {
18308 size = toUnsigned$2(data[i] << 24 | data[i + 1] << 16 | data[i + 2] << 8 | data[i + 3]);
18309 type = parseType_1(data.subarray(i + 4, i + 8));
18310 end = size > 1 ? i + size : data.byteLength;
18311
18312 if (type === path[0]) {
18313 if (path.length === 1) {
18314 // this is the end of the path and we've found the box we were
18315 // looking for
18316 results.push(data.subarray(i + 8, end));
18317 } else {
18318 // recursively search for the next box along the path
18319 subresults = findBox(data.subarray(i + 8, end), path.slice(1));
18320
18321 if (subresults.length) {
18322 results = results.concat(subresults);
18323 }
18324 }
18325 }
18326
18327 i = end;
18328 } // we've finished searching all of the data
18329
18330
18331 return results;
18332 };
18333
18334 var findBox_1 = findBox;
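  // Illustrative usage sketch (hypothetical helper, never invoked): findBox
  // walks the ISO-BMFF box tree by path and returns the payload of every
  // match, so the first track fragment's tfdt in a hypothetical `moofBytes`
  // buffer would be:
  var exampleFindTfdt = function (moofBytes) {
    return findBox_1(moofBytes, ['moof', 'traf', 'tfdt'])[0];
  };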
18335
18336 var tfhd = function tfhd(data) {
18337 var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
18338 result = {
18339 version: data[0],
18340 flags: new Uint8Array(data.subarray(1, 4)),
18341 trackId: view.getUint32(4)
18342 },
18343 baseDataOffsetPresent = result.flags[2] & 0x01,
18344 sampleDescriptionIndexPresent = result.flags[2] & 0x02,
18345 defaultSampleDurationPresent = result.flags[2] & 0x08,
18346 defaultSampleSizePresent = result.flags[2] & 0x10,
18347 defaultSampleFlagsPresent = result.flags[2] & 0x20,
18348 durationIsEmpty = result.flags[0] & 0x010000,
18349 defaultBaseIsMoof = result.flags[0] & 0x020000,
18350 i;
18351 i = 8;
18352
18353 if (baseDataOffsetPresent) {
18354 i += 4; // truncate top 4 bytes
18355 // FIXME: should we read the full 64 bits?
18356
18357 result.baseDataOffset = view.getUint32(12);
18358 i += 4;
18359 }
18360
18361 if (sampleDescriptionIndexPresent) {
18362 result.sampleDescriptionIndex = view.getUint32(i);
18363 i += 4;
18364 }
18365
18366 if (defaultSampleDurationPresent) {
18367 result.defaultSampleDuration = view.getUint32(i);
18368 i += 4;
18369 }
18370
18371 if (defaultSampleSizePresent) {
18372 result.defaultSampleSize = view.getUint32(i);
18373 i += 4;
18374 }
18375
18376 if (defaultSampleFlagsPresent) {
18377 result.defaultSampleFlags = view.getUint32(i);
18378 }
18379
18380 if (durationIsEmpty) {
18381 result.durationIsEmpty = true;
18382 }
18383
18384 if (!baseDataOffsetPresent && defaultBaseIsMoof) {
18385 result.baseDataOffsetIsMoof = true;
18386 }
18387
18388 return result;
18389 };
18390
18391 var parseTfhd = tfhd;
18392
18393 var parseSampleFlags = function parseSampleFlags(flags) {
18394 return {
18395 isLeading: (flags[0] & 0x0c) >>> 2,
18396 dependsOn: flags[0] & 0x03,
18397 isDependedOn: (flags[1] & 0xc0) >>> 6,
18398 hasRedundancy: (flags[1] & 0x30) >>> 4,
18399 paddingValue: (flags[1] & 0x0e) >>> 1,
18400 isNonSyncSample: flags[1] & 0x01,
18401 degradationPriority: flags[2] << 8 | flags[3]
18402 };
18403 };
18404
18405 var parseSampleFlags_1 = parseSampleFlags;
18406
18407 var trun = function trun(data) {
18408 var result = {
18409 version: data[0],
18410 flags: new Uint8Array(data.subarray(1, 4)),
18411 samples: []
18412 },
18413 view = new DataView(data.buffer, data.byteOffset, data.byteLength),
18414 // Flag interpretation
18415 dataOffsetPresent = result.flags[2] & 0x01,
18416 // compare with 2nd byte of 0x1
18417 firstSampleFlagsPresent = result.flags[2] & 0x04,
18418 // compare with 2nd byte of 0x4
18419 sampleDurationPresent = result.flags[1] & 0x01,
18420 // compare with 2nd byte of 0x100
18421 sampleSizePresent = result.flags[1] & 0x02,
18422 // compare with 2nd byte of 0x200
18423 sampleFlagsPresent = result.flags[1] & 0x04,
18424 // compare with 2nd byte of 0x400
18425 sampleCompositionTimeOffsetPresent = result.flags[1] & 0x08,
18426 // compare with 2nd byte of 0x800
18427 sampleCount = view.getUint32(4),
18428 offset = 8,
18429 sample;
18430
18431 if (dataOffsetPresent) {
18432 // 32 bit signed integer
18433 result.dataOffset = view.getInt32(offset);
18434 offset += 4;
18435 } // Overrides the flags for the first sample only. The order of
18436 // optional values will be: duration, size, compositionTimeOffset
18437
18438
18439 if (firstSampleFlagsPresent && sampleCount) {
18440 sample = {
18441 flags: parseSampleFlags_1(data.subarray(offset, offset + 4))
18442 };
18443 offset += 4;
18444
18445 if (sampleDurationPresent) {
18446 sample.duration = view.getUint32(offset);
18447 offset += 4;
18448 }
18449
18450 if (sampleSizePresent) {
18451 sample.size = view.getUint32(offset);
18452 offset += 4;
18453 }
18454
18455 if (sampleCompositionTimeOffsetPresent) {
18456 if (result.version === 1) {
18457 sample.compositionTimeOffset = view.getInt32(offset);
18458 } else {
18459 sample.compositionTimeOffset = view.getUint32(offset);
18460 }
18461
18462 offset += 4;
18463 }
18464
18465 result.samples.push(sample);
18466 sampleCount--;
18467 }
18468
18469 while (sampleCount--) {
18470 sample = {};
18471
18472 if (sampleDurationPresent) {
18473 sample.duration = view.getUint32(offset);
18474 offset += 4;
18475 }
18476
18477 if (sampleSizePresent) {
18478 sample.size = view.getUint32(offset);
18479 offset += 4;
18480 }
18481
18482 if (sampleFlagsPresent) {
18483 sample.flags = parseSampleFlags_1(data.subarray(offset, offset + 4));
18484 offset += 4;
18485 }
18486
18487 if (sampleCompositionTimeOffsetPresent) {
18488 if (result.version === 1) {
18489 sample.compositionTimeOffset = view.getInt32(offset);
18490 } else {
18491 sample.compositionTimeOffset = view.getUint32(offset);
18492 }
18493
18494 offset += 4;
18495 }
18496
18497 result.samples.push(sample);
18498 }
18499
18500 return result;
18501 };
18502
18503 var parseTrun = trun;
18504
18505 var toUnsigned$1 = bin.toUnsigned;
18506
18507 var tfdt = function tfdt(data) {
18508 var result = {
18509 version: data[0],
18510 flags: new Uint8Array(data.subarray(1, 4)),
18511 baseMediaDecodeTime: toUnsigned$1(data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7])
18512 };
18513
18514 if (result.version === 1) {
18515 result.baseMediaDecodeTime *= Math.pow(2, 32);
18516 result.baseMediaDecodeTime += toUnsigned$1(data[8] << 24 | data[9] << 16 | data[10] << 8 | data[11]);
18517 }
18518
18519 return result;
18520 };
18521
18522 var parseTfdt = tfdt;
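  // Illustrative arithmetic (hypothetical helper, never invoked) for the
  // version 1 branch above: the upper 32 bits of a 64-bit baseMediaDecodeTime
  // cannot be OR-ed in (bitwise math is 32-bit), so they are scaled by 2^32
  // with ordinary multiplication instead.
  var exampleCombine64 = function (hi, lo) {
    return hi * Math.pow(2, 32) + lo;
  };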
18523
18524 var toUnsigned = bin.toUnsigned;
18525 var toHexString = bin.toHexString;
18526 var timescale, startTime, compositionStartTime, getVideoTrackIds, getTracks, getTimescaleFromMediaHeader;
18527 /**
18528 * Parses an MP4 initialization segment and extracts the timescale
18529 * values for any declared tracks. Timescale values indicate the
18530 * number of clock ticks per second to assume for time-based values
18531 * elsewhere in the MP4.
18532 *
18533 * To determine the start time of an MP4, you need two pieces of
18534 * information: the timescale unit and the earliest base media decode
18535 * time. Multiple timescales can be specified within an MP4 but the
18536 * base media decode time is always expressed in the timescale from
18537 * the media header box for the track:
18538 * ```
18539 * moov > trak > mdia > mdhd.timescale
18540 * ```
18541 * @param init {Uint8Array} the bytes of the init segment
18542 * @return {object} a hash of track ids to timescale values or null if
18543 * the init segment is malformed.
18544 */
18545
18546 timescale = function timescale(init) {
18547 var result = {},
18548 traks = findBox_1(init, ['moov', 'trak']); // mdhd timescale
18549
18550 return traks.reduce(function (result, trak) {
18551 var tkhd, version, index, id, mdhd;
18552 tkhd = findBox_1(trak, ['tkhd'])[0];
18553
18554 if (!tkhd) {
18555 return null;
18556 }
18557
18558 version = tkhd[0];
18559 index = version === 0 ? 12 : 20;
18560 id = toUnsigned(tkhd[index] << 24 | tkhd[index + 1] << 16 | tkhd[index + 2] << 8 | tkhd[index + 3]);
18561 mdhd = findBox_1(trak, ['mdia', 'mdhd'])[0];
18562
18563 if (!mdhd) {
18564 return null;
18565 }
18566
18567 version = mdhd[0];
18568 index = version === 0 ? 12 : 20;
18569 result[id] = toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
18570 return result;
18571 }, result);
18572 };
18573 /**
18574 * Determine the base media decode start time, in seconds, for an MP4
18575 * fragment. If multiple fragments are specified, the earliest time is
18576 * returned.
18577 *
18578 * The base media decode time can be parsed from track fragment
18579 * metadata:
18580 * ```
18581 * moof > traf > tfdt.baseMediaDecodeTime
18582 * ```
18583 * It requires the timescale value from the mdhd to interpret.
18584 *
18585 * @param timescale {object} a hash of track ids to timescale values.
* @param fragment {Uint8Array} the bytes of a media fragment
18586 * @return {number} the earliest base media decode start time for the
18587 * fragment, in seconds
18588 */
18589
18590
18591 startTime = function startTime(timescale, fragment) {
18592 var trafs, baseTimes, result; // we need info from two children of each track fragment box
18593
18594 trafs = findBox_1(fragment, ['moof', 'traf']); // determine the start times for each track
18595
18596 baseTimes = [].concat.apply([], trafs.map(function (traf) {
18597 return findBox_1(traf, ['tfhd']).map(function (tfhd) {
18598 var id, scale, baseTime; // get the track id from the tfhd
18599
18600 id = toUnsigned(tfhd[4] << 24 | tfhd[5] << 16 | tfhd[6] << 8 | tfhd[7]); // assume a 90kHz clock if no timescale was specified
18601
18602 scale = timescale[id] || 90e3; // get the base media decode time from the tfdt
18603
18604 baseTime = findBox_1(traf, ['tfdt']).map(function (tfdt) {
18605 var version, result;
18606 version = tfdt[0];
18607 result = toUnsigned(tfdt[4] << 24 | tfdt[5] << 16 | tfdt[6] << 8 | tfdt[7]);
18608
18609 if (version === 1) {
18610 result *= Math.pow(2, 32);
18611 result += toUnsigned(tfdt[8] << 24 | tfdt[9] << 16 | tfdt[10] << 8 | tfdt[11]);
18612 }
18613
18614 return result;
18615 })[0];
18616 baseTime = typeof baseTime === 'number' && !isNaN(baseTime) ? baseTime : Infinity; // convert base time to seconds
18617
18618 return baseTime / scale;
18619 });
18620 })); // return the minimum
18621
18622 result = Math.min.apply(null, baseTimes);
18623 return isFinite(result) ? result : 0;
18624 };
18625 /**
18626 * Determine the composition start, in seconds, for an MP4
18627 * fragment.
18628 *
18629 * The composition start time of a fragment can be calculated using the base
18630 * media decode time, composition time offset, and timescale, as follows:
18631 *
18632 * compositionStartTime = (baseMediaDecodeTime + compositionTimeOffset) / timescale
18633 *
18634 * All of the aforementioned information is contained within a media fragment's
18635 * `traf` box, except for timescale info, which comes from the initialization
18636 * segment, so a track id (also contained within a `traf`) is also necessary to
18637 * associate it with a timescale
18638 *
18639 *
18640 * @param timescales {object} - a hash of track ids to timescale values.
18641 * @param fragment {Uint8Array} - the bytes of a media segment
18642 * @return {number} the composition start time for the fragment, in seconds
18643 **/
18644
18645
18646 compositionStartTime = function compositionStartTime(timescales, fragment) {
18647 var trafBoxes = findBox_1(fragment, ['moof', 'traf']);
18648 var baseMediaDecodeTime = 0;
18649 var compositionTimeOffset = 0;
18650 var trackId;
18651
18652 if (trafBoxes && trafBoxes.length) {
18653 // The spec states that track run samples contained within a `traf` box are contiguous, but
18654 // it does not explicitly state whether the `traf` boxes themselves are contiguous.
18655 // We will assume that they are, so we only need the first to calculate start time.
18656 var tfhd = findBox_1(trafBoxes[0], ['tfhd'])[0];
18657 var trun = findBox_1(trafBoxes[0], ['trun'])[0];
18658 var tfdt = findBox_1(trafBoxes[0], ['tfdt'])[0];
18659
18660 if (tfhd) {
18661 var parsedTfhd = parseTfhd(tfhd);
18662 trackId = parsedTfhd.trackId;
18663 }
18664
18665 if (tfdt) {
18666 var parsedTfdt = parseTfdt(tfdt);
18667 baseMediaDecodeTime = parsedTfdt.baseMediaDecodeTime;
18668 }
18669
18670 if (trun) {
18671 var parsedTrun = parseTrun(trun);
18672
18673 if (parsedTrun.samples && parsedTrun.samples.length) {
18674 compositionTimeOffset = parsedTrun.samples[0].compositionTimeOffset || 0;
18675 }
18676 }
18677 } // Get timescale for this specific track. Assume a 90kHz clock if no timescale was
18678 // specified.
18679
18680
18681 var timescale = timescales[trackId] || 90e3; // return the composition start time, in seconds
18682
18683 return (baseMediaDecodeTime + compositionTimeOffset) / timescale;
18684 };
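  // Illustrative arithmetic (hypothetical helper, never invoked): with a
  // baseMediaDecodeTime of 90000 ticks, a first-sample compositionTimeOffset
  // of 3000 ticks, and a 90000-tick-per-second timescale, the composition
  // start time is (90000 + 3000) / 90000, or about 1.033 seconds.
  var exampleCompositionStart = function (baseMediaDecodeTime, compositionTimeOffset, trackTimescale) {
    return (baseMediaDecodeTime + compositionTimeOffset) / trackTimescale;
  };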
18685 /**
18686 * Find the trackIds of the video tracks in this source.
18687 * Found by parsing the Handler Reference and Track Header Boxes:
18688 * moov > trak > mdia > hdlr
18689 * moov > trak > tkhd
18690 *
18691 * @param {Uint8Array} init - The bytes of the init segment for this source
18692 * @return {Number[]} A list of trackIds
18693 *
18694 * @see ISO-BMFF-12/2015, Section 8.4.3
18695 **/
18696
18697
18698 getVideoTrackIds = function getVideoTrackIds(init) {
18699 var traks = findBox_1(init, ['moov', 'trak']);
18700 var videoTrackIds = [];
18701 traks.forEach(function (trak) {
18702 var hdlrs = findBox_1(trak, ['mdia', 'hdlr']);
18703 var tkhds = findBox_1(trak, ['tkhd']);
18704 hdlrs.forEach(function (hdlr, index) {
18705 var handlerType = parseType_1(hdlr.subarray(8, 12));
18706 var tkhd = tkhds[index];
18707 var view;
18708 var version;
18709 var trackId;
18710
18711 if (handlerType === 'vide') {
18712 view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
18713 version = view.getUint8(0);
18714 trackId = version === 0 ? view.getUint32(12) : view.getUint32(20);
18715 videoTrackIds.push(trackId);
18716 }
18717 });
18718 });
18719 return videoTrackIds;
18720 };
18721
18722 getTimescaleFromMediaHeader = function getTimescaleFromMediaHeader(mdhd) {
18723 // mdhd is a FullBox, meaning it will have its own version as the first byte
18724 var version = mdhd[0];
18725 var index = version === 0 ? 12 : 20;
18726 return toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
18727 };
18728 /**
18729 * Get all the video, audio, and hint tracks from a non-fragmented
18730 * MP4 segment
18731 */
18732
18733
18734 getTracks = function getTracks(init) {
18735 var traks = findBox_1(init, ['moov', 'trak']);
18736 var tracks = [];
18737 traks.forEach(function (trak) {
18738 var track = {};
18739 var tkhd = findBox_1(trak, ['tkhd'])[0];
18740 var view, tkhdVersion; // id
18741
18742 if (tkhd) {
18743 view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
18744 tkhdVersion = view.getUint8(0);
18745 track.id = tkhdVersion === 0 ? view.getUint32(12) : view.getUint32(20);
18746 }
18747
18748 var hdlr = findBox_1(trak, ['mdia', 'hdlr'])[0]; // type
18749
18750 if (hdlr) {
18751 var type = parseType_1(hdlr.subarray(8, 12));
18752
18753 if (type === 'vide') {
18754 track.type = 'video';
18755 } else if (type === 'soun') {
18756 track.type = 'audio';
18757 } else {
18758 track.type = type;
18759 }
18760 } // codec
18761
18762
18763 var stsd = findBox_1(trak, ['mdia', 'minf', 'stbl', 'stsd'])[0];
18764
18765 if (stsd) {
18766 var sampleDescriptions = stsd.subarray(8); // gives the codec type string
18767
18768 track.codec = parseType_1(sampleDescriptions.subarray(4, 8));
18769 var codecBox = findBox_1(sampleDescriptions, [track.codec])[0];
18770 var codecConfig, codecConfigType;
18771
18772 if (codecBox) {
18773 // https://tools.ietf.org/html/rfc6381#section-3.3
18774 if (/^[a-z]vc[1-9]$/i.test(track.codec)) {
18775 // we don't need anything but the "config" parameter of the
18776 // avc1 codecBox
18777 codecConfig = codecBox.subarray(78);
18778 codecConfigType = parseType_1(codecConfig.subarray(4, 8));
18779
18780 if (codecConfigType === 'avcC' && codecConfig.length > 11) {
18781 track.codec += '.'; // left padded with zeroes for single digit hex
18782 // profile idc
18783
18784 track.codec += toHexString(codecConfig[9]); // the byte containing the constraint_set flags
18785
18786 track.codec += toHexString(codecConfig[10]); // level idc
18787
18788 track.codec += toHexString(codecConfig[11]);
18789 } else {
18790 // TODO: show a warning that we couldn't parse the codec
18791 // and are using the default
18792 track.codec = 'avc1.4d400d';
18793 }
18794 } else if (/^mp4[av]$/i.test(track.codec)) {
18795 // we do not need anything but the streamDescriptor of the mp4a codecBox
18796 codecConfig = codecBox.subarray(28);
18797 codecConfigType = parseType_1(codecConfig.subarray(4, 8));
18798
18799 if (codecConfigType === 'esds' && codecConfig.length > 20 && codecConfig[19] !== 0) {
18800 track.codec += '.' + toHexString(codecConfig[19]); // this value is only a single digit
18801
18802 track.codec += '.' + toHexString(codecConfig[20] >>> 2 & 0x3f).replace(/^0/, '');
18803 } else {
18804 // TODO: show a warning that we couldn't parse the codec
18805 // and are using the default
18806 track.codec = 'mp4a.40.2';
18807 }
18808 } else {
18809 // flac, opus, etc
18810 track.codec = track.codec.toLowerCase();
18811 }
18812 }
18813 }
18814
18815 var mdhd = findBox_1(trak, ['mdia', 'mdhd'])[0];
18816
18817 if (mdhd) {
18818 track.timescale = getTimescaleFromMediaHeader(mdhd);
18819 }
18820
18821 tracks.push(track);
18822 });
18823 return tracks;
18824 };
18825
18826 var probe = {
18827 // export mp4 inspector's findBox and parseType for backwards compatibility
18828 findBox: findBox_1,
18829 parseType: parseType_1,
18830 timescale: timescale,
18831 startTime: startTime,
18832 compositionStartTime: compositionStartTime,
18833 videoTrackIds: getVideoTrackIds,
18834 tracks: getTracks,
18835 getTimescaleFromMediaHeader: getTimescaleFromMediaHeader
18836 };
18837
18838 var REQUEST_ERRORS = {
18839 FAILURE: 2,
18840 TIMEOUT: -101,
18841 ABORTED: -102
18842 };
18843 /**
18844 * Abort all requests
18845 *
18846 * @param {Object[]} activeXhrs - an array of outstanding XHR requests
18847 */
18848
18849 var abortAll = function abortAll(activeXhrs) {
18850 activeXhrs.forEach(function (xhr) {
18851 xhr.abort();
18852 });
18853 };
18854 /**
18855 * Gather important bandwidth stats once a request has completed
18856 *
18857 * @param {Object} request - the XHR request from which to gather stats
18858 */
18859
18860
18861 var getRequestStats = function getRequestStats(request) {
18862 return {
18863 bandwidth: request.bandwidth,
18864 bytesReceived: request.bytesReceived || 0,
18865 roundTripTime: request.roundTripTime || 0
18866 };
18867 };
18868 /**
18869 * If possible gather bandwidth stats as a request is in
18870 * progress
18871 *
18872 * @param {Event} progressEvent - an event object from an XHR's progress event
18873 */
18874
18875
18876 var getProgressStats = function getProgressStats(progressEvent) {
18877 var request = progressEvent.target;
18878 var roundTripTime = Date.now() - request.requestTime;
18879 var stats = {
18880 bandwidth: Infinity,
18881 bytesReceived: 0,
18882 roundTripTime: roundTripTime || 0
18883 };
18884 stats.bytesReceived = progressEvent.loaded; // This can result in Infinity if stats.roundTripTime is 0 but that is ok
18885 // because we should only use bandwidth stats on progress to determine when to
18886 // abort a request early due to insufficient bandwidth
18887
18888 stats.bandwidth = Math.floor(stats.bytesReceived / stats.roundTripTime * 8 * 1000);
18889 return stats;
18890 };
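  // Illustrative arithmetic (hypothetical helper, never invoked) for the
  // bandwidth estimate above: bytes become bits (* 8) and milliseconds become
  // seconds (* 1000), so 500000 bytes received in 2000 ms works out to
  // 2000000 bits per second.
  var exampleBandwidthBps = function (bytesReceived, roundTripTimeMs) {
    return Math.floor(bytesReceived / roundTripTimeMs * 8 * 1000);
  };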
18891 /**
18892 * Handle all error conditions in one place and return an object
18893 * with all the information
18894 *
18895 * @param {Error|null} error - if non-null signals an error occurred with the XHR
18896 * @param {Object} request - the XHR request that possibly generated the error
18897 */
18898
18899
18900 var handleErrors = function handleErrors(error, request) {
18901 if (request.timedout) {
18902 return {
18903 status: request.status,
18904 message: 'HLS request timed-out at URL: ' + request.uri,
18905 code: REQUEST_ERRORS.TIMEOUT,
18906 xhr: request
18907 };
18908 }
18909
18910 if (request.aborted) {
18911 return {
18912 status: request.status,
18913 message: 'HLS request aborted at URL: ' + request.uri,
18914 code: REQUEST_ERRORS.ABORTED,
18915 xhr: request
18916 };
18917 }
18918
18919 if (error) {
18920 return {
18921 status: request.status,
18922 message: 'HLS request errored at URL: ' + request.uri,
18923 code: REQUEST_ERRORS.FAILURE,
18924 xhr: request
18925 };
18926 }
18927
18928 return null;
18929 };
18930 /**
18931 * Handle responses for key data and convert the key data to the correct format
18932 * for the decryption step later
18933 *
18934 * @param {Object} segment - a simplified copy of the segmentInfo object
18935 * from SegmentLoader
18936 * @param {Function} finishProcessingFn - a callback to execute to continue processing
18937 * this request
18938 */
18939
18940
18941 var handleKeyResponse = function handleKeyResponse(segment, finishProcessingFn) {
18942 return function (error, request) {
18943 var response = request.response;
18944 var errorObj = handleErrors(error, request);
18945
18946 if (errorObj) {
18947 return finishProcessingFn(errorObj, segment);
18948 }
18949
18950 if (response.byteLength !== 16) {
18951 return finishProcessingFn({
18952 status: request.status,
18953 message: 'Invalid HLS key at URL: ' + request.uri,
18954 code: REQUEST_ERRORS.FAILURE,
18955 xhr: request
18956 }, segment);
18957 }
18958
18959 var view = new DataView(response);
18960 segment.key.bytes = new Uint32Array([view.getUint32(0), view.getUint32(4), view.getUint32(8), view.getUint32(12)]);
18961 return finishProcessingFn(null, segment);
18962 };
18963 };
18964 /**
18965 * Handle init-segment responses
18966 *
18967 * @param {Object} segment - a simplified copy of the segmentInfo object
18968 * from SegmentLoader
18969 * @param {Function} finishProcessingFn - a callback to execute to continue processing
18970 * this request
18971 */
18972
18973
18974 var handleInitSegmentResponse = function handleInitSegmentResponse(_ref) {
18975 var segment = _ref.segment,
18976 finishProcessingFn = _ref.finishProcessingFn;
18977 return function (error, request) {
18978 var response = request.response;
18979 var errorObj = handleErrors(error, request);
18980
18981 if (errorObj) {
18982 return finishProcessingFn(errorObj, segment);
18983 } // stop processing if received empty content
18984
18985
18986 if (response.byteLength === 0) {
18987 return finishProcessingFn({
18988 status: request.status,
18989 message: 'Empty HLS segment content at URL: ' + request.uri,
18990 code: REQUEST_ERRORS.FAILURE,
18991 xhr: request
18992 }, segment);
18993 }
18994
18995 segment.map.bytes = new Uint8Array(request.response);
18996 var type = detectContainerForBytes(segment.map.bytes); // TODO: We should also handle ts init segments here, but we
18997 // only know how to parse mp4 init segments at the moment
18998
18999 if (type !== 'mp4') {
19000 return finishProcessingFn({
19001 status: request.status,
19002 message: "Found unsupported " + (type || 'unknown') + " container for initialization segment at URL: " + request.uri,
19003 code: REQUEST_ERRORS.FAILURE,
19004 internal: true,
19005 xhr: request
19006 }, segment);
19007 }
19008
19009 var tracks = probe.tracks(segment.map.bytes);
19010 tracks.forEach(function (track) {
19011 segment.map.tracks = segment.map.tracks || {}; // only support one track of each type for now
19012
19013 if (segment.map.tracks[track.type]) {
19014 return;
19015 }
19016
19017 segment.map.tracks[track.type] = track;
19018
19019 if (typeof track.id === 'number' && track.timescale) {
19020 segment.map.timescales = segment.map.timescales || {};
19021 segment.map.timescales[track.id] = track.timescale;
19022 }
19023 });
19024 return finishProcessingFn(null, segment);
19025 };
19026 };
19027 /**
19028 * Response handler for segment requests, making sure to set the correct
19029 * property depending on whether the segment is encrypted or not.
19030 * Also records and keeps track of stats that are used for ABR purposes
19031 *
19032 * @param {Object} segment - a simplified copy of the segmentInfo object
19033 * from SegmentLoader
19034 * @param {Function} finishProcessingFn - a callback to execute to continue processing
19035 * this request
19036 */
19037
19038
19039 var handleSegmentResponse = function handleSegmentResponse(_ref2) {
19040 var segment = _ref2.segment,
19041 finishProcessingFn = _ref2.finishProcessingFn,
19042 responseType = _ref2.responseType;
19043 return function (error, request) {
19044 var response = request.response;
19045 var errorObj = handleErrors(error, request);
19046
19047 if (errorObj) {
19048 return finishProcessingFn(errorObj, segment);
19049 }
19050
19051 var newBytes = // although responseText "should" exist, this guard serves to prevent an error being
19052 // thrown for two primary cases:
19053 // 1. the mime type override stops working, or is not implemented for a specific
19054 // browser
19055 // 2. when using mock XHR libraries like sinon that do not allow the override behavior
19056 responseType === 'arraybuffer' || !request.responseText ? request.response : stringToArrayBuffer(request.responseText.substring(segment.lastReachedChar || 0)); // stop processing if received empty content
19057
19058 if (response.byteLength === 0) {
19059 return finishProcessingFn({
19060 status: request.status,
19061 message: 'Empty HLS segment content at URL: ' + request.uri,
19062 code: REQUEST_ERRORS.FAILURE,
19063 xhr: request
19064 }, segment);
19065 }
19066
19067 segment.stats = getRequestStats(request);
19068
19069 if (segment.key) {
19070 segment.encryptedBytes = new Uint8Array(newBytes);
19071 } else {
19072 segment.bytes = new Uint8Array(newBytes);
19073 }
19074
19075 return finishProcessingFn(null, segment);
19076 };
19077 };
19078
19079 var transmuxAndNotify = function transmuxAndNotify(_ref3) {
19080 var segment = _ref3.segment,
19081 bytes = _ref3.bytes,
19082 isPartial = _ref3.isPartial,
19083 trackInfoFn = _ref3.trackInfoFn,
19084 timingInfoFn = _ref3.timingInfoFn,
19085 videoSegmentTimingInfoFn = _ref3.videoSegmentTimingInfoFn,
19086 audioSegmentTimingInfoFn = _ref3.audioSegmentTimingInfoFn,
19087 id3Fn = _ref3.id3Fn,
19088 captionsFn = _ref3.captionsFn,
19089 isEndOfTimeline = _ref3.isEndOfTimeline,
19090 endedTimelineFn = _ref3.endedTimelineFn,
19091 dataFn = _ref3.dataFn,
19092 doneFn = _ref3.doneFn;
19093 var fmp4Tracks = segment.map && segment.map.tracks || {};
19094 var isMuxed = Boolean(fmp4Tracks.audio && fmp4Tracks.video); // Keep references to each function so we can null them out after we're done with them.
19095 // One reason for this is that in the case of full segments, we want to trust start
19096 // times from the probe, rather than the transmuxer.
19097
19098 var audioStartFn = timingInfoFn.bind(null, segment, 'audio', 'start');
19099 var audioEndFn = timingInfoFn.bind(null, segment, 'audio', 'end');
19100 var videoStartFn = timingInfoFn.bind(null, segment, 'video', 'start');
19101 var videoEndFn = timingInfoFn.bind(null, segment, 'video', 'end'); // Check to see if we are appending a full segment.
19102
19103 if (!isPartial && !segment.lastReachedChar) {
19104 // In the full segment transmuxer, we don't yet have the ability to extract a "proper"
19105 // start time. Meaning cached frame data may corrupt our notion of where this segment
19106 // really starts. To get around this, full segment appends should probe for the info
19107 // needed.
19108 var probeResult = probeTsSegment(bytes, segment.baseStartTime);
19109
19110 if (probeResult) {
19111 trackInfoFn(segment, {
19112 hasAudio: probeResult.hasAudio,
19113 hasVideo: probeResult.hasVideo,
19114 isMuxed: isMuxed
19115 });
19116 trackInfoFn = null;
19117
19118 if (probeResult.hasAudio && !isMuxed) {
19119 audioStartFn(probeResult.audioStart);
19120 }
19121
19122 if (probeResult.hasVideo) {
19123 videoStartFn(probeResult.videoStart);
19124 }
19125
19126 audioStartFn = null;
19127 videoStartFn = null;
19128 }
19129 }
19130
19131 transmux({
19132 bytes: bytes,
19133 transmuxer: segment.transmuxer,
19134 audioAppendStart: segment.audioAppendStart,
19135 gopsToAlignWith: segment.gopsToAlignWith,
19136 isPartial: isPartial,
19137 remux: isMuxed,
19138 onData: function onData(result) {
19139 result.type = result.type === 'combined' ? 'video' : result.type;
19140 dataFn(segment, result);
19141 },
19142 onTrackInfo: function onTrackInfo(trackInfo) {
19143 if (trackInfoFn) {
19144 if (isMuxed) {
19145 trackInfo.isMuxed = true;
19146 }
19147
19148 trackInfoFn(segment, trackInfo);
19149 }
19150 },
19151 onAudioTimingInfo: function onAudioTimingInfo(audioTimingInfo) {
19152 // we only want the first start value we encounter
19153 if (audioStartFn && typeof audioTimingInfo.start !== 'undefined') {
19154 audioStartFn(audioTimingInfo.start);
19155 audioStartFn = null;
19156 } // we want to continually update the end time
19157
19158
19159 if (audioEndFn && typeof audioTimingInfo.end !== 'undefined') {
19160 audioEndFn(audioTimingInfo.end);
19161 }
19162 },
19163 onVideoTimingInfo: function onVideoTimingInfo(videoTimingInfo) {
19164 // we only want the first start value we encounter
19165 if (videoStartFn && typeof videoTimingInfo.start !== 'undefined') {
19166 videoStartFn(videoTimingInfo.start);
19167 videoStartFn = null;
19168 } // we want to continually update the end time
19169
19170
19171 if (videoEndFn && typeof videoTimingInfo.end !== 'undefined') {
19172 videoEndFn(videoTimingInfo.end);
19173 }
19174 },
19175 onVideoSegmentTimingInfo: function onVideoSegmentTimingInfo(videoSegmentTimingInfo) {
19176 videoSegmentTimingInfoFn(videoSegmentTimingInfo);
19177 },
19178 onAudioSegmentTimingInfo: function onAudioSegmentTimingInfo(audioSegmentTimingInfo) {
19179 audioSegmentTimingInfoFn(audioSegmentTimingInfo);
19180 },
19181 onId3: function onId3(id3Frames, dispatchType) {
19182 id3Fn(segment, id3Frames, dispatchType);
19183 },
19184 onCaptions: function onCaptions(captions) {
19185 captionsFn(segment, [captions]);
19186 },
19187 // if this is a partial transmux, the end of the timeline is not reached
19188 // until the last part of the segment is processed (at which point isPartial
19189 // will be false)
19190 isEndOfTimeline: isEndOfTimeline && !isPartial,
19191 onEndedTimeline: function onEndedTimeline() {
19192 endedTimelineFn();
19193 },
19194 onDone: function onDone(result) {
19195 // To handle partial appends, there won't be a done function passed in (since
19196 // there's still, potentially, more segment to process), so there's nothing to do.
19197 if (!doneFn || isPartial) {
19198 return;
19199 }
19200
19201 result.type = result.type === 'combined' ? 'video' : result.type;
19202 doneFn(null, segment, result);
19203 }
19204 });
19205 };
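  /*
   * Illustrative sketch (not part of the bundle): the timing callbacks above
   * use a "first start wins, end keeps updating" pattern, implemented by
   * nulling the start callback once it has fired. The same idea in isolation:
   *
   *   var makeTimingRecorder = function (onStart, onEnd) {
   *     return function (timingInfo) {
   *       if (onStart && typeof timingInfo.start !== 'undefined') {
   *         onStart(timingInfo.start);
   *         onStart = null; // ignore any later start values
   *       }
   *       if (typeof timingInfo.end !== 'undefined') {
   *         onEnd(timingInfo.end); // the end time is continually refreshed
   *       }
   *     };
   *   };
   */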
19206
19207 var handleSegmentBytes = function handleSegmentBytes(_ref4) {
19208 var segment = _ref4.segment,
19209 bytes = _ref4.bytes,
19210 isPartial = _ref4.isPartial,
19211 trackInfoFn = _ref4.trackInfoFn,
19212 timingInfoFn = _ref4.timingInfoFn,
19213 videoSegmentTimingInfoFn = _ref4.videoSegmentTimingInfoFn,
19214 audioSegmentTimingInfoFn = _ref4.audioSegmentTimingInfoFn,
19215 id3Fn = _ref4.id3Fn,
19216 captionsFn = _ref4.captionsFn,
19217 isEndOfTimeline = _ref4.isEndOfTimeline,
19218 endedTimelineFn = _ref4.endedTimelineFn,
19219 dataFn = _ref4.dataFn,
19220 doneFn = _ref4.doneFn;
19221 var bytesAsUint8Array = new Uint8Array(bytes); // TODO:
19222 // We should have a handler that fetches the number of bytes required
19223 // to check if something is fmp4. This will allow us to save bandwidth
19224 // because we can only blacklist a playlist and abort requests
19225 // by codec after trackinfo triggers.
19226
19227 if (isLikelyFmp4MediaSegment(bytesAsUint8Array)) {
19228 segment.isFmp4 = true;
19229 var tracks = segment.map.tracks;
19230 var trackInfo = {
19231 isFmp4: true,
19232 hasVideo: !!tracks.video,
19233 hasAudio: !!tracks.audio
19234 }; // if we have an audio track, with a codec that is not set to
19235 // encrypted audio
19236
19237 if (tracks.audio && tracks.audio.codec && tracks.audio.codec !== 'enca') {
19238 trackInfo.audioCodec = tracks.audio.codec;
19239 } // if we have a video track, with a codec that is not set to
19240 // encrypted video
19241
19242
19243 if (tracks.video && tracks.video.codec && tracks.video.codec !== 'encv') {
19244 trackInfo.videoCodec = tracks.video.codec;
19245 }
19246
19247 if (tracks.video && tracks.audio) {
19248 trackInfo.isMuxed = true;
19249 } // since we don't support appending fmp4 data on progress, we know we have the full
19250 // segment here
19251
19252
19253 trackInfoFn(segment, trackInfo); // The probe doesn't provide the segment end time, so only callback with the start
19254 // time. The end time can be roughly calculated by the receiver using the duration.
19255 //
19256 // Note that the start time returned by the probe reflects the baseMediaDecodeTime, as
19257 // that is the true start of the segment (where the playback engine should begin
19258 // decoding).
19259
19260 var timingInfo = probe.startTime(segment.map.timescales, bytesAsUint8Array);
19261
19262 if (trackInfo.hasAudio && !trackInfo.isMuxed) {
19263 timingInfoFn(segment, 'audio', 'start', timingInfo);
19264 }
19265
19266 if (trackInfo.hasVideo) {
19267 timingInfoFn(segment, 'video', 'start', timingInfo);
19268 }
19269
19270 var finishLoading = function finishLoading(captions) {
19271 // if the track still has audio at this point it is only possible
19272 // for it to be audio only. See `tracks.video && tracks.audio` if statement
19273 // above.
19274 // we make sure to use segment.bytes here, as that reference is updated when the caption parser transfers the bytes back
19275 dataFn(segment, {
19276 data: bytes,
19277 type: trackInfo.hasAudio && !trackInfo.isMuxed ? 'audio' : 'video'
19278 });
19279
19280 if (captions && captions.length) {
19281 captionsFn(segment, captions);
19282 }
19283
19284 doneFn(null, segment, {});
19285 }; // Run through the CaptionParser in case there are captions.
19286 // Initialize CaptionParser if it hasn't been yet
19287
19288
19289 if (!tracks.video || !bytes.byteLength || !segment.transmuxer) {
19290 finishLoading();
19291 return;
19292 }
19293
19294 var buffer = bytes instanceof ArrayBuffer ? bytes : bytes.buffer;
19295 var byteOffset = bytes instanceof ArrayBuffer ? 0 : bytes.byteOffset;
19296
19297 var listenForCaptions = function listenForCaptions(event) {
19298 if (event.data.action !== 'mp4Captions') {
19299 return;
19300 }
19301
19302 segment.transmuxer.removeEventListener('message', listenForCaptions);
19303 var data = event.data.data; // transfer ownership of bytes back to us.
19304
19305 segment.bytes = bytes = new Uint8Array(data, data.byteOffset || 0, data.byteLength);
19306 finishLoading(event.data.captions);
19307 };
19308
19309 segment.transmuxer.addEventListener('message', listenForCaptions); // transfer ownership of bytes to worker.
19310
19311 segment.transmuxer.postMessage({
19312 action: 'pushMp4Captions',
19313 timescales: segment.map.timescales,
19314 trackIds: [tracks.video.id],
19315 data: buffer,
19316 byteOffset: byteOffset,
19317 byteLength: bytes.byteLength
19318 }, [buffer]);
19319 return;
19320 } // VTT or other segments that don't need processing
19321
19322
19323 if (!segment.transmuxer) {
19324 doneFn(null, segment, {});
19325 return;
19326 }
19327
19328 if (typeof segment.container === 'undefined') {
19329 segment.container = detectContainerForBytes(bytesAsUint8Array);
19330 }
19331
19332 if (segment.container !== 'ts' && segment.container !== 'aac') {
19333 trackInfoFn(segment, {
19334 hasAudio: false,
19335 hasVideo: false
19336 });
19337 doneFn(null, segment, {});
19338 return;
19339 } // ts or aac
19340
19341
19342 transmuxAndNotify({
19343 segment: segment,
19344 bytes: bytes,
19345 isPartial: isPartial,
19346 trackInfoFn: trackInfoFn,
19347 timingInfoFn: timingInfoFn,
19348 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
19349 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
19350 id3Fn: id3Fn,
19351 captionsFn: captionsFn,
19352 isEndOfTimeline: isEndOfTimeline,
19353 endedTimelineFn: endedTimelineFn,
19354 dataFn: dataFn,
19355 doneFn: doneFn
19356 });
19357 };
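  /*
   * Illustrative sketch (not part of the bundle): the routing decision made
   * above, reduced to a standalone function. It assumes the same
   * isLikelyFmp4MediaSegment and detectContainerForBytes helpers from
   * @videojs/vhs-utils that this bundle already uses:
   *
   *   var routeSegmentBytes = function (bytes) {
   *     var typedBytes = new Uint8Array(bytes);
   *
   *     // fmp4 segments are appended whole, without transmuxing
   *     if (isLikelyFmp4MediaSegment(typedBytes)) {
   *       return 'fmp4';
   *     }
   *
   *     // only ts and aac go through the transmuxer; anything else
   *     // (e.g. VTT) is passed through untouched
   *     var container = detectContainerForBytes(typedBytes);
   *     return container === 'ts' || container === 'aac' ? 'transmux' : 'passthrough';
   *   };
   */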
19358 /**
19359 * Decrypt the segment via the decryption web worker
19360 *
19361 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
19362 * routines
19363 * @param {Object} segment - a simplified copy of the segmentInfo object
19364 * from SegmentLoader
19365 * @param {Function} trackInfoFn - a callback that receives track info
19366 * @param {Function} timingInfoFn - a callback that receives timing info
19367 * @param {Function} videoSegmentTimingInfoFn
19368 * a callback that receives video timing info based on media times and
19369 * any adjustments made by the transmuxer
19370 * @param {Function} audioSegmentTimingInfoFn
19371 * a callback that receives audio timing info based on media times and
19372 * any adjustments made by the transmuxer
19373 * @param {boolean} isEndOfTimeline
19374 * true if this segment represents the last segment in a timeline
19375 * @param {Function} endedTimelineFn
19376 * a callback made when a timeline is ended, will only be called if
19377 * isEndOfTimeline is true
19378 * @param {Function} dataFn - a callback that is executed when segment bytes are available
19379 * and ready to use
19380 * @param {Function} doneFn - a callback that is executed after decryption has completed
19381 */
19382
19383
19384 var decryptSegment = function decryptSegment(_ref5) {
19385 var decryptionWorker = _ref5.decryptionWorker,
19386 segment = _ref5.segment,
19387 trackInfoFn = _ref5.trackInfoFn,
19388 timingInfoFn = _ref5.timingInfoFn,
19389 videoSegmentTimingInfoFn = _ref5.videoSegmentTimingInfoFn,
19390 audioSegmentTimingInfoFn = _ref5.audioSegmentTimingInfoFn,
19391 id3Fn = _ref5.id3Fn,
19392 captionsFn = _ref5.captionsFn,
19393 isEndOfTimeline = _ref5.isEndOfTimeline,
19394 endedTimelineFn = _ref5.endedTimelineFn,
19395 dataFn = _ref5.dataFn,
19396 doneFn = _ref5.doneFn;
19397
19398 var decryptionHandler = function decryptionHandler(event) {
19399 if (event.data.source === segment.requestId) {
19400 decryptionWorker.removeEventListener('message', decryptionHandler);
19401 var decrypted = event.data.decrypted;
19402 segment.bytes = new Uint8Array(decrypted.bytes, decrypted.byteOffset, decrypted.byteLength);
19403 handleSegmentBytes({
19404 segment: segment,
19405 bytes: segment.bytes,
19406 isPartial: false,
19407 trackInfoFn: trackInfoFn,
19408 timingInfoFn: timingInfoFn,
19409 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
19410 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
19411 id3Fn: id3Fn,
19412 captionsFn: captionsFn,
19413 isEndOfTimeline: isEndOfTimeline,
19414 endedTimelineFn: endedTimelineFn,
19415 dataFn: dataFn,
19416 doneFn: doneFn
19417 });
19418 }
19419 };
19420
19421 decryptionWorker.addEventListener('message', decryptionHandler);
19422 var keyBytes;
19423
19424 if (segment.key.bytes.slice) {
19425 keyBytes = segment.key.bytes.slice();
19426 } else {
19427 keyBytes = new Uint32Array(Array.prototype.slice.call(segment.key.bytes));
19428 } // this is an encrypted segment
19429 // incrementally decrypt the segment
19430
19431
19432 decryptionWorker.postMessage(createTransferableMessage({
19433 source: segment.requestId,
19434 encrypted: segment.encryptedBytes,
19435 key: keyBytes,
19436 iv: segment.key.iv
19437 }), [segment.encryptedBytes.buffer, keyBytes.buffer]);
19438 };
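  /*
   * Illustrative sketch (not part of the bundle): decryptSegment relies on a
   * request/response protocol with the worker in which the reply's `source`
   * field echoes the segment's requestId, so concurrent decrypts are never
   * crossed. A minimal caller under those assumptions:
   *
   *   var requestDecrypt = function (worker, message, callback) {
   *     var handler = function (event) {
   *       // only accept the reply addressed to this request
   *       if (event.data.source !== message.source) {
   *         return;
   *       }
   *       worker.removeEventListener('message', handler);
   *       callback(event.data.decrypted);
   *     };
   *
   *     worker.addEventListener('message', handler);
   *     // transfer the underlying buffers to the worker instead of copying
   *     worker.postMessage(message, [message.encrypted.buffer, message.key.buffer]);
   *   };
   */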
19439 /**
19440 * This function waits for all XHRs to finish (with either success or failure)
19441 * before continuing processing via its callback. If any request errors, all
19442 * outstanding requests are aborted and the callback is invoked immediately
19443 * with that first error.
19444 *
19445 * @param {Object} activeXhrs - an object that tracks all XHR requests
19446 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
19447 * routines
19448 * @param {Function} trackInfoFn - a callback that receives track info
19449 * @param {Function} timingInfoFn - a callback that receives timing info
19450 * @param {Function} videoSegmentTimingInfoFn
19451 * a callback that receives video timing info based on media times and
19452 * any adjustments made by the transmuxer
19453 * @param {Function} audioSegmentTimingInfoFn
19454 * a callback that receives audio timing info based on media times and
19455 * any adjustments made by the transmuxer
19456 * @param {Function} id3Fn - a callback that receives ID3 metadata
19457 * @param {Function} captionsFn - a callback that receives captions
19458 * @param {boolean} isEndOfTimeline
19459 * true if this segment represents the last segment in a timeline
19460 * @param {Function} endedTimelineFn
19461 * a callback made when a timeline is ended, will only be called if
19462 * isEndOfTimeline is true
19463 * @param {Function} dataFn - a callback that is executed when segment bytes are available
19464 * and ready to use
19465 * @param {Function} doneFn - a callback that is executed after all resources have been
19466 * downloaded and any decryption completed
19467 */
19468
19469
19470 var waitForCompletion = function waitForCompletion(_ref6) {
19471 var activeXhrs = _ref6.activeXhrs,
19472 decryptionWorker = _ref6.decryptionWorker,
19473 trackInfoFn = _ref6.trackInfoFn,
19474 timingInfoFn = _ref6.timingInfoFn,
19475 videoSegmentTimingInfoFn = _ref6.videoSegmentTimingInfoFn,
19476 audioSegmentTimingInfoFn = _ref6.audioSegmentTimingInfoFn,
19477 id3Fn = _ref6.id3Fn,
19478 captionsFn = _ref6.captionsFn,
19479 isEndOfTimeline = _ref6.isEndOfTimeline,
19480 endedTimelineFn = _ref6.endedTimelineFn,
19481 dataFn = _ref6.dataFn,
19482 doneFn = _ref6.doneFn;
19483 var count = 0;
19484 var didError = false;
19485 return function (error, segment) {
19486 if (didError) {
19487 return;
19488 }
19489
19490 if (error) {
19491 didError = true; // If there are errors, we have to abort any outstanding requests
19492
19493 abortAll(activeXhrs); // Even though the requests above are aborted, and in theory we could wait until we
19494 // handle the aborted events from those requests, there are some cases where we may
19495 // never get an aborted event. For instance, if the network connection is lost and
19496 // there were two requests, the first may have triggered an error immediately, while
19497 // the second request remains unsent. In that case, the aborted algorithm will not
19498 // trigger an abort: see https://xhr.spec.whatwg.org/#the-abort()-method
19499 //
19500 // We also can't rely on the ready state of the XHR, since the request that
19501 // triggered the connection error may also show as a ready state of 0 (unsent).
19502 // Therefore, we have to finish this group of requests immediately after the first
19503 // seen error.
19504
19505 return doneFn(error, segment);
19506 }
19507
19508 count += 1;
19509
19510 if (count === activeXhrs.length) {
19511 // Keep track of when *all* of the requests have completed
19512 segment.endOfAllRequests = Date.now();
19513
19514 if (segment.encryptedBytes) {
19515 return decryptSegment({
19516 decryptionWorker: decryptionWorker,
19517 segment: segment,
19518 trackInfoFn: trackInfoFn,
19519 timingInfoFn: timingInfoFn,
19520 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
19521 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
19522 id3Fn: id3Fn,
19523 captionsFn: captionsFn,
19524 isEndOfTimeline: isEndOfTimeline,
19525 endedTimelineFn: endedTimelineFn,
19526 dataFn: dataFn,
19527 doneFn: doneFn
19528 });
19529 } // Otherwise, everything is ready, so just continue
19530
19531
19532 handleSegmentBytes({
19533 segment: segment,
19534 bytes: segment.bytes,
19535 isPartial: false,
19536 trackInfoFn: trackInfoFn,
19537 timingInfoFn: timingInfoFn,
19538 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
19539 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
19540 id3Fn: id3Fn,
19541 captionsFn: captionsFn,
19542 isEndOfTimeline: isEndOfTimeline,
19543 endedTimelineFn: endedTimelineFn,
19544 dataFn: dataFn,
19545 doneFn: doneFn
19546 });
19547 }
19548 };
19549 };
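  /*
   * Illustrative sketch (not part of the bundle): waitForCompletion is a
   * fan-in, i.e. one shared callback handed to every xhr in the batch that
   * proceeds only once all of them have reported in, and fails fast on the
   * first error. The same pattern in isolation:
   *
   *   var makeFanIn = function (expected, done) {
   *     var count = 0;
   *     var failed = false;
   *
   *     return function (error, result) {
   *       if (failed) {
   *         return;
   *       }
   *       if (error) {
   *         failed = true;
   *         // report immediately instead of waiting on the other requests
   *         return done(error);
   *       }
   *       count += 1;
   *       if (count === expected) {
   *         done(null, result);
   *       }
   *     };
   *   };
   */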
19550 /**
19551 * Calls the abort callback if any request within the batch was aborted. Will only call
19552 * the callback once per batch of requests, even if multiple were aborted.
19553 *
19554 * @param {Object} loadendState - state to check to see if the abort function was called
19555 * @param {Function} abortFn - callback to call for abort
19556 */
19557
19558
19559 var handleLoadEnd = function handleLoadEnd(_ref7) {
19560 var loadendState = _ref7.loadendState,
19561 abortFn = _ref7.abortFn;
19562 return function (event) {
19563 var request = event.target;
19564
19565 if (request.aborted && abortFn && !loadendState.calledAbortFn) {
19566 abortFn();
19567 loadendState.calledAbortFn = true;
19568 }
19569 };
19570 };
19571 /**
19572 * Simple progress event callback handler that gathers some stats before
19573 * executing a provided callback with the `segment` object
19574 *
19575 * @param {Object} segment - a simplified copy of the segmentInfo object
19576 * from SegmentLoader
19577 * @param {Function} progressFn - a callback that is executed each time a progress event
19578 * is received
19579 * @param {Function} trackInfoFn - a callback that receives track info
19580 * @param {Function} timingInfoFn - a callback that receives timing info
19581 * @param {Function} videoSegmentTimingInfoFn
19582 * a callback that receives video timing info based on media times and
19583 * any adjustments made by the transmuxer
19584 * @param {Function} audioSegmentTimingInfoFn
19585 * a callback that receives audio timing info based on media times and
19586 * any adjustments made by the transmuxer
19587 * @param {boolean} isEndOfTimeline
19588 * true if this segment represents the last segment in a timeline
19589 * @param {Function} endedTimelineFn
19590 * a callback made when a timeline is ended, will only be called if
19591 * isEndOfTimeline is true
19592 * @param {Function} dataFn - a callback that is executed when segment bytes are available
19593 * and ready to use
19594 * @param {Event} event - the progress event object from XMLHttpRequest
19595 */
19596
19597
19598 var handleProgress = function handleProgress(_ref8) {
19599 var segment = _ref8.segment,
19600 progressFn = _ref8.progressFn,
19601 trackInfoFn = _ref8.trackInfoFn,
19602 timingInfoFn = _ref8.timingInfoFn,
19603 videoSegmentTimingInfoFn = _ref8.videoSegmentTimingInfoFn,
19604 audioSegmentTimingInfoFn = _ref8.audioSegmentTimingInfoFn,
19605 id3Fn = _ref8.id3Fn,
19606 captionsFn = _ref8.captionsFn,
19607 isEndOfTimeline = _ref8.isEndOfTimeline,
19608 endedTimelineFn = _ref8.endedTimelineFn,
19609 dataFn = _ref8.dataFn,
19610 handlePartialData = _ref8.handlePartialData;
19611 return function (event) {
19612 var request = event.target;
19613
19614 if (request.aborted) {
19615 return;
19616 } // don't support encrypted segments or fmp4 for now
19617
19618
19619 if (handlePartialData && !segment.key && // although responseText "should" exist, this guard serves to prevent an error being
19620 // thrown on the next check for two primary cases:
19621 // 1. the mime type override stops working, or is not implemented for a specific
19622 // browser
19623 // 2. when using mock XHR libraries like sinon that do not allow the override behavior
19624 request.responseText && // in order to determine if it's an fmp4 we need at least 8 bytes
19625 request.responseText.length >= 8) {
19626 var newBytes = stringToArrayBuffer(request.responseText.substring(segment.lastReachedChar || 0));
19627
19628 if (segment.lastReachedChar || !isLikelyFmp4MediaSegment(new Uint8Array(newBytes))) {
19629 segment.lastReachedChar = request.responseText.length;
19630 handleSegmentBytes({
19631 segment: segment,
19632 bytes: newBytes,
19633 isPartial: true,
19634 trackInfoFn: trackInfoFn,
19635 timingInfoFn: timingInfoFn,
19636 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
19637 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
19638 id3Fn: id3Fn,
19639 captionsFn: captionsFn,
19640 isEndOfTimeline: isEndOfTimeline,
19641 endedTimelineFn: endedTimelineFn,
19642 dataFn: dataFn
19643 });
19644 }
19645 }
19646
19647 segment.stats = videojs__default['default'].mergeOptions(segment.stats, getProgressStats(event)); // record the time that we receive the first byte of data
19648
19649 if (!segment.stats.firstBytesReceivedAt && segment.stats.bytesReceived) {
19650 segment.stats.firstBytesReceivedAt = Date.now();
19651 }
19652
19653 return progressFn(event, segment);
19654 };
19655 };
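  /*
   * Illustrative sketch (not part of the bundle): the partial-data path above
   * depends on the x-user-defined mime override, which makes the browser hand
   * raw bytes back as text. stringToArrayBuffer (defined elsewhere in this
   * bundle) recovers the bytes; a typical implementation of that conversion
   * masks each char code down to a single byte:
   *
   *   var binaryStringToArrayBuffer = function (string) {
   *     var bytes = new Uint8Array(string.length);
   *
   *     for (var i = 0; i < string.length; i++) {
   *       // with the x-user-defined charset, each char code is one raw byte
   *       bytes[i] = string.charCodeAt(i) & 0xFF;
   *     }
   *
   *     return bytes.buffer;
   *   };
   */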
19656 /**
19657 * Loads all resources and does any processing necessary for a media-segment
19658 *
19659 * Features:
19660 * decrypts the media-segment if it has a key uri and an iv
19661 * aborts *all* requests if *any* one request fails
19662 *
19663 * The segment object, at minimum, has the following format:
19664 * {
19665 * resolvedUri: String,
19666 * [transmuxer]: Object,
19667 * [byterange]: {
19668 * offset: Number,
19669 * length: Number
19670 * },
19671 * [key]: {
19672 * resolvedUri: String
19673 * [byterange]: {
19674 * offset: Number,
19675 * length: Number
19676 * },
19677 * iv: {
19678 * bytes: Uint32Array
19679 * }
19680 * },
19681 * [map]: {
19682 * resolvedUri: String,
19683 * [byterange]: {
19684 * offset: Number,
19685 * length: Number
19686 * },
19687 * [bytes]: Uint8Array
19688 * }
19689 * }
19690 * ...where [name] denotes optional properties
19691 *
19692 * @param {Function} xhr - an instance of the xhr wrapper in xhr.js
19693 * @param {Object} xhrOptions - the base options to provide to all xhr requests
19694 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128
19695 * decryption routines
19696 * @param {Object} segment - a simplified copy of the segmentInfo object
19697 * from SegmentLoader
19698 * @param {Function} abortFn - a callback called (only once) if any piece of a request was
19699 * aborted
19700 * @param {Function} progressFn - a callback that receives progress events from the main
19701 * segment's xhr request
19702 * @param {Function} trackInfoFn - a callback that receives track info
19703 * @param {Function} timingInfoFn - a callback that receives timing info
19704 * @param {Function} videoSegmentTimingInfoFn
19705 * a callback that receives video timing info based on media times and
19706 * any adjustments made by the transmuxer
19707 * @param {Function} audioSegmentTimingInfoFn
19708 * a callback that receives audio timing info based on media times and
19709 * any adjustments made by the transmuxer
19710 * @param {Function} id3Fn - a callback that receives ID3 metadata
19711 * @param {Function} captionsFn - a callback that receives captions
19712 * @param {boolean} isEndOfTimeline
19713 * true if this segment represents the last segment in a timeline
19714 * @param {Function} endedTimelineFn
19715 * a callback made when a timeline is ended, will only be called if
19716 * isEndOfTimeline is true
19717 * @param {Function} dataFn - a callback that receives data from the main segment's xhr
19718 * request, transmuxed if needed
19719 * @param {Function} doneFn - a callback that is executed only once all requests have
19720 * succeeded or failed
19721 * @return {Function} a function that, when invoked, immediately aborts all
19722 * outstanding requests
19723 */
19724
19725
19726 var mediaSegmentRequest = function mediaSegmentRequest(_ref9) {
19727 var xhr = _ref9.xhr,
19728 xhrOptions = _ref9.xhrOptions,
19729 decryptionWorker = _ref9.decryptionWorker,
19730 segment = _ref9.segment,
19731 abortFn = _ref9.abortFn,
19732 progressFn = _ref9.progressFn,
19733 trackInfoFn = _ref9.trackInfoFn,
19734 timingInfoFn = _ref9.timingInfoFn,
19735 videoSegmentTimingInfoFn = _ref9.videoSegmentTimingInfoFn,
19736 audioSegmentTimingInfoFn = _ref9.audioSegmentTimingInfoFn,
19737 id3Fn = _ref9.id3Fn,
19738 captionsFn = _ref9.captionsFn,
19739 isEndOfTimeline = _ref9.isEndOfTimeline,
19740 endedTimelineFn = _ref9.endedTimelineFn,
19741 dataFn = _ref9.dataFn,
19742 doneFn = _ref9.doneFn,
19743 handlePartialData = _ref9.handlePartialData;
19744 var activeXhrs = [];
19745 var finishProcessingFn = waitForCompletion({
19746 activeXhrs: activeXhrs,
19747 decryptionWorker: decryptionWorker,
19748 trackInfoFn: trackInfoFn,
19749 timingInfoFn: timingInfoFn,
19750 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
19751 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
19752 id3Fn: id3Fn,
19753 captionsFn: captionsFn,
19754 isEndOfTimeline: isEndOfTimeline,
19755 endedTimelineFn: endedTimelineFn,
19756 dataFn: dataFn,
19757 doneFn: doneFn
19758 }); // optionally, request the decryption key
19759
19760 if (segment.key && !segment.key.bytes) {
19761 var keyRequestOptions = videojs__default['default'].mergeOptions(xhrOptions, {
19762 uri: segment.key.resolvedUri,
19763 responseType: 'arraybuffer'
19764 });
19765 var keyRequestCallback = handleKeyResponse(segment, finishProcessingFn);
19766 var keyXhr = xhr(keyRequestOptions, keyRequestCallback);
19767 activeXhrs.push(keyXhr);
19768 } // optionally, request the associated media init segment
19769
19770
19771 if (segment.map && !segment.map.bytes) {
19772 var initSegmentOptions = videojs__default['default'].mergeOptions(xhrOptions, {
19773 uri: segment.map.resolvedUri,
19774 responseType: 'arraybuffer',
19775 headers: segmentXhrHeaders(segment.map)
19776 });
19777 var initSegmentRequestCallback = handleInitSegmentResponse({
19778 segment: segment,
19779 finishProcessingFn: finishProcessingFn
19780 });
19781 var initSegmentXhr = xhr(initSegmentOptions, initSegmentRequestCallback);
19782 activeXhrs.push(initSegmentXhr);
19783 }
19784
19785 var segmentRequestOptions = videojs__default['default'].mergeOptions(xhrOptions, {
19786 uri: segment.part && segment.part.resolvedUri || segment.resolvedUri,
19787 responseType: 'arraybuffer',
19788 headers: segmentXhrHeaders(segment)
19789 });
19790
19791 if (handlePartialData) {
19792 // setting to text is required for partial responses
19793 // conversion to ArrayBuffer happens later
19794 segmentRequestOptions.responseType = 'text';
19795
19796 segmentRequestOptions.beforeSend = function (xhrObject) {
19797 // XHR binary charset opt by Marcus Granado 2006 [http://mgran.blogspot.com]
19798 // makes the browser pass through the "text" unparsed
19799 xhrObject.overrideMimeType('text/plain; charset=x-user-defined');
19800 };
19801 }
19802
19803 var segmentRequestCallback = handleSegmentResponse({
19804 segment: segment,
19805 finishProcessingFn: finishProcessingFn,
19806 responseType: segmentRequestOptions.responseType
19807 });
19808 var segmentXhr = xhr(segmentRequestOptions, segmentRequestCallback);
19809 segmentXhr.addEventListener('progress', handleProgress({
19810 segment: segment,
19811 progressFn: progressFn,
19812 trackInfoFn: trackInfoFn,
19813 timingInfoFn: timingInfoFn,
19814 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
19815 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
19816 id3Fn: id3Fn,
19817 captionsFn: captionsFn,
19818 isEndOfTimeline: isEndOfTimeline,
19819 endedTimelineFn: endedTimelineFn,
19820 dataFn: dataFn,
19821 handlePartialData: handlePartialData
19822 }));
19823 activeXhrs.push(segmentXhr); // since all parts of the request must be considered, but callbacks should not
19824 // be made multiple times, provide a shared state object
19825
19826 var loadendState = {};
19827 activeXhrs.forEach(function (activeXhr) {
19828 activeXhr.addEventListener('loadend', handleLoadEnd({
19829 loadendState: loadendState,
19830 abortFn: abortFn
19831 }));
19832 });
19833 return function () {
19834 return abortAll(activeXhrs);
19835 };
19836 };
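  /*
   * Illustrative sketch (not part of the bundle): a caller supplies the xhr
   * wrapper, a segment in the shape documented above, and the callback set,
   * then keeps the returned function as an abort handle. All of the concrete
   * values below (vhsXhr, decrypter, transmuxer, the URI) are hypothetical:
   *
   *   var abort = mediaSegmentRequest({
   *     xhr: vhsXhr,
   *     xhrOptions: { timeout: 45000 },
   *     decryptionWorker: decrypter,
   *     segment: { resolvedUri: 'https://example.com/segment-0.ts', transmuxer: transmuxer },
   *     abortFn: function () {},
   *     progressFn: function (event, segment) {},
   *     trackInfoFn: function (segment, trackInfo) {},
   *     timingInfoFn: function (segment, mediaType, timeType, time) {},
   *     videoSegmentTimingInfoFn: function (timingInfo) {},
   *     audioSegmentTimingInfoFn: function (timingInfo) {},
   *     id3Fn: function (segment, frames, dispatchType) {},
   *     captionsFn: function (segment, captions) {},
   *     isEndOfTimeline: false,
   *     endedTimelineFn: function () {},
   *     dataFn: function (segment, result) {},
   *     doneFn: function (error, segment, result) {}
   *   });
   *
   *   // later, e.g. on a rendition switch:
   *   abort();
   */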
19837
19838 /**
19839 * @file - codecs.js - Handles tasks regarding codec strings, such as parsing them
19840 * from playlists or translating codec strings into objects that can be examined.
19841 */
19842 var logFn$1 = logger('CodecUtils');
19843 /**
19844 * Returns a set of codec strings parsed from the playlist or the default
19845 * codec strings if no codecs were specified in the playlist
19846 *
19847 * @param {Playlist} media the current media playlist
19848 * @return {Object} an object with the video and audio codecs
19849 */
19850
19851 var getCodecs = function getCodecs(media) {
19852 // if the codecs were explicitly specified, use them instead of the
19853 // defaults
19854 var mediaAttributes = media.attributes || {};
19855
19856 if (mediaAttributes.CODECS) {
19857 return parseCodecs(mediaAttributes.CODECS);
19858 }
19859 };
19860
19861 var isMaat = function isMaat(master, media) {
19862 var mediaAttributes = media.attributes || {};
19863 return master && master.mediaGroups && master.mediaGroups.AUDIO && mediaAttributes.AUDIO && master.mediaGroups.AUDIO[mediaAttributes.AUDIO];
19864 };
19865 var isMuxed = function isMuxed(master, media) {
19866 if (!isMaat(master, media)) {
19867 return true;
19868 }
19869
19870 var mediaAttributes = media.attributes || {};
19871 var audioGroup = master.mediaGroups.AUDIO[mediaAttributes.AUDIO];
19872
19873 for (var groupId in audioGroup) {
19874 // If an audio group has a URI (the case for HLS, as HLS will use external playlists),
19875 // or there are listed playlists (the case for DASH, as the manifest will have already
19876 // provided all of the details necessary to generate the audio playlist, as opposed to
19877 // HLS' externally requested playlists), then the content is demuxed.
19878 if (!audioGroup[groupId].uri && !audioGroup[groupId].playlists) {
19879 return true;
19880 }
19881 }
19882
19883 return false;
19884 };
19885 var unwrapCodecList = function unwrapCodecList(codecList) {
19886 var codecs = {};
19887 codecList.forEach(function (_ref) {
19888 var mediaType = _ref.mediaType,
19889 type = _ref.type,
19890 details = _ref.details;
19891 codecs[mediaType] = codecs[mediaType] || [];
19892 codecs[mediaType].push(translateLegacyCodec("" + type + details));
19893 });
19894 Object.keys(codecs).forEach(function (mediaType) {
19895 if (codecs[mediaType].length > 1) {
19896 logFn$1("multiple " + mediaType + " codecs found as attributes: " + codecs[mediaType].join(', ') + ". Setting playlist codecs to null so that we wait for mux.js to probe segments for real codecs.");
19897 codecs[mediaType] = null;
19898 return;
19899 }
19900
19901 codecs[mediaType] = codecs[mediaType][0];
19902 });
19903 return codecs;
19904 };
19905 var codecCount = function codecCount(codecObj) {
19906 var count = 0;
19907
19908 if (codecObj.audio) {
19909 count++;
19910 }
19911
19912 if (codecObj.video) {
19913 count++;
19914 }
19915
19916 return count;
19917 };
19918 /**
19919 * Calculates the codec strings for a working configuration of
19920 * SourceBuffers to play variant streams in a master playlist. If
19921 * there is no possible working configuration, an empty object will be
19922 * returned.
19923 *
19924 * @param {Object} master the m3u8 object for the master playlist
19925 * @param {Object} media the m3u8 object for the variant playlist
19926 * @return {Object} the codec strings.
19927 *
19928 * @private
19929 */
19930
19931 var codecsForPlaylist = function codecsForPlaylist(master, media) {
19932 var mediaAttributes = media.attributes || {};
19933 var codecInfo = unwrapCodecList(getCodecs(media) || []); // HLS with multiple-audio tracks must always get an audio codec.
19934 // Put another way, there is no way to have a video-only multiple-audio HLS!
19935
19936 if (isMaat(master, media) && !codecInfo.audio) {
19937 if (!isMuxed(master, media)) {
19938 // It is possible for codecs to be specified on the audio media group playlist but
19939 // not on the rendition playlist. This is mostly the case for DASH, where audio and
19940 // video are always separate (and separately specified).
19941 var defaultCodecs = unwrapCodecList(codecsFromDefault(master, mediaAttributes.AUDIO) || []);
19942
19943 if (defaultCodecs.audio) {
19944 codecInfo.audio = defaultCodecs.audio;
19945 }
19946 }
19947 }
19948
19949 return codecInfo;
19950 };
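  /*
   * Illustrative sketch (not part of the bundle): with an explicit CODECS
   * attribute, codecsForPlaylist just unwraps the parsed list. Assuming
   * parseCodecs splits the attribute as in @videojs/vhs-utils, and with
   * hypothetical inputs:
   *
   *   var media = {
   *     attributes: { CODECS: 'avc1.4d400d,mp4a.40.2', AUDIO: 'aud1' }
   *   };
   *
   *   var codecs = codecsForPlaylist(master, media);
   *   // => { video: 'avc1.4d400d', audio: 'mp4a.40.2' }
   *
   * Only when the rendition is multiple-audio (isMaat), demuxed, and missing
   * an audio codec is the default audio media group playlist consulted via
   * codecsFromDefault.
   */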
19951
19952 var logFn = logger('PlaylistSelector');
19953
19954 var representationToString = function representationToString(representation) {
19955 if (!representation || !representation.playlist) {
19956 return;
19957 }
19958
19959 var playlist = representation.playlist;
19960 return JSON.stringify({
19961 id: playlist.id,
19962 bandwidth: representation.bandwidth,
19963 width: representation.width,
19964 height: representation.height,
19965 codecs: playlist.attributes && playlist.attributes.CODECS || ''
19966 });
19967 }; // Utilities
19968
19969 /**
19970 * Returns the CSS value for the specified property on an element
19971 * using `getComputedStyle`. Firefox has a long-standing issue where
19972 * getComputedStyle() may return null when running in an iframe with
19973 * `display: none`.
19974 *
19975 * @see https://bugzilla.mozilla.org/show_bug.cgi?id=548397
19976 * @param {HTMLElement} el the htmlelement to work on
19977 * @param {string} property the property to get the style for
19978 */
19979
19980
19981 var safeGetComputedStyle = function safeGetComputedStyle(el, property) {
19982 if (!el) {
19983 return '';
19984 }
19985
19986 var result = window__default['default'].getComputedStyle(el);
19987
19988 if (!result) {
19989 return '';
19990 }
19991
19992 return result[property];
19993 };
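  /*
   * Illustrative sketch (not part of the bundle): the selectors below use
   * safeGetComputedStyle to size the player while tolerating the Firefox
   * iframe/display:none case where getComputedStyle returns null. Assuming a
   * hypothetical video.js player instance:
   *
   *   // '' when el is missing or getComputedStyle returns null,
   *   // otherwise something like '640px'
   *   var width = safeGetComputedStyle(player.el(), 'width');
   *   var playerWidth = parseInt(width, 10) * (window.devicePixelRatio || 1);
   */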
19994 /**
19995 * Reusable stable sort function
19996 *
19997 * @param {Playlists} array the array of playlists to sort
19998 * @param {Function} sortFn the comparator function to sort with
19999 * @function stableSort
20000 */
20001
20002
20003 var stableSort = function stableSort(array, sortFn) {
20004 var newArray = array.slice();
20005 array.sort(function (left, right) {
20006 var cmp = sortFn(left, right);
20007
20008 if (cmp === 0) {
20009 return newArray.indexOf(left) - newArray.indexOf(right);
20010 }
20011
20012 return cmp;
20013 });
20014 };
20015 /**
20016 * A comparator function to sort two playlist object by bandwidth.
20017 *
20018 * @param {Object} left a media playlist object
20019 * @param {Object} right a media playlist object
20020 * @return {number} Greater than zero if the bandwidth attribute of
20021 * left is greater than the corresponding attribute of right. Less
20022 * than zero if the bandwidth of right is greater than left and
20023 * exactly zero if the two are equal.
20024 */
20025
20026
20027 var comparePlaylistBandwidth = function comparePlaylistBandwidth(left, right) {
20028 var leftBandwidth;
20029 var rightBandwidth;
20030
20031 if (left.attributes.BANDWIDTH) {
20032 leftBandwidth = left.attributes.BANDWIDTH;
20033 }
20034
20035 leftBandwidth = leftBandwidth || window__default['default'].Number.MAX_VALUE;
20036
20037 if (right.attributes.BANDWIDTH) {
20038 rightBandwidth = right.attributes.BANDWIDTH;
20039 }
20040
20041 rightBandwidth = rightBandwidth || window__default['default'].Number.MAX_VALUE;
20042 return leftBandwidth - rightBandwidth;
20043 };
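  /*
   * Illustrative sketch (not part of the bundle): because stableSort preserves
   * the relative order of equal elements, renditions with the same BANDWIDTH
   * stay in manifest order. With hypothetical playlists:
   *
   *   var playlists = [
   *     { id: 'a', attributes: { BANDWIDTH: 800000 } },
   *     { id: 'b', attributes: { BANDWIDTH: 400000 } },
   *     { id: 'c', attributes: { BANDWIDTH: 400000 } },
   *     { id: 'd', attributes: {} } // missing BANDWIDTH sorts as MAX_VALUE, i.e. last
   *   ];
   *
   *   stableSort(playlists, comparePlaylistBandwidth);
   *   // order by id: b, c, a, d ('b' stays ahead of 'c')
   */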
20044 /**
20045 * A comparator function to sort two playlist object by resolution (width).
20046 *
20047 * @param {Object} left a media playlist object
20048 * @param {Object} right a media playlist object
20049 * @return {number} Greater than zero if the resolution.width attribute of
20050 * left is greater than the corresponding attribute of right. Less
20051 * than zero if the resolution.width of right is greater than left and
20052 * exactly zero if the two are equal.
20053 */
20054
20055 var comparePlaylistResolution = function comparePlaylistResolution(left, right) {
20056 var leftWidth;
20057 var rightWidth;
20058
20059 if (left.attributes.RESOLUTION && left.attributes.RESOLUTION.width) {
20060 leftWidth = left.attributes.RESOLUTION.width;
20061 }
20062
20063 leftWidth = leftWidth || window__default['default'].Number.MAX_VALUE;
20064
20065 if (right.attributes.RESOLUTION && right.attributes.RESOLUTION.width) {
20066 rightWidth = right.attributes.RESOLUTION.width;
20067 }
20068
20069 rightWidth = rightWidth || window__default['default'].Number.MAX_VALUE; // NOTE - Fallback to bandwidth sort as appropriate in cases where multiple renditions
20070 // have the same media dimensions/resolution
20071
20072 if (leftWidth === rightWidth && left.attributes.BANDWIDTH && right.attributes.BANDWIDTH) {
20073 return left.attributes.BANDWIDTH - right.attributes.BANDWIDTH;
20074 }
20075
20076 return leftWidth - rightWidth;
20077 };
20078 /**
20079 * Chooses the appropriate media playlist based on bandwidth and player size
20080 *
20081 * @param {Object} master
20082 * Object representation of the master manifest
20083 * @param {number} playerBandwidth
20084 * Current calculated bandwidth of the player
20085 * @param {number} playerWidth
20086 * Current width of the player element (should account for the device pixel ratio)
20087 * @param {number} playerHeight
20088 * Current height of the player element (should account for the device pixel ratio)
20089 * @param {boolean} limitRenditionByPlayerDimensions
20090 * True if the player width and height should be used during the selection, false otherwise
20091 * @param {Object} masterPlaylistController
20092 * the current masterPlaylistController object
20093 * @return {Playlist} the highest bitrate playlist less than the
20094 * currently detected bandwidth, accounting for some amount of
20095 * bandwidth variance
20096 */
20097
20098 var simpleSelector = function simpleSelector(master, playerBandwidth, playerWidth, playerHeight, limitRenditionByPlayerDimensions, masterPlaylistController) {
20099 // If we end up getting called before `master` is available, exit early
20100 if (!master) {
20101 return;
20102 }
20103
20104 var options = {
20105 bandwidth: playerBandwidth,
20106 width: playerWidth,
20107 height: playerHeight,
20108 limitRenditionByPlayerDimensions: limitRenditionByPlayerDimensions
20109 };
20110 var playlists = master.playlists; // if playlist is audio only, select between currently active audio group playlists.
20111
20112 if (Playlist.isAudioOnly(master)) {
20113 playlists = masterPlaylistController.getAudioTrackPlaylists_(); // add audioOnly to options so that we log audioOnly: true
20114 // at the bottom of this function for debugging.
20115
20116 options.audioOnly = true;
20117 } // convert the playlists to an intermediary representation to make comparisons easier
20118
20119
20120 var sortedPlaylistReps = playlists.map(function (playlist) {
20121 var bandwidth;
20122 var width = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.width;
20123 var height = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height;
20124 bandwidth = playlist.attributes && playlist.attributes.BANDWIDTH;
20125 bandwidth = bandwidth || window__default['default'].Number.MAX_VALUE;
20126 return {
20127 bandwidth: bandwidth,
20128 width: width,
20129 height: height,
20130 playlist: playlist
20131 };
20132 });
20133 stableSort(sortedPlaylistReps, function (left, right) {
20134 return left.bandwidth - right.bandwidth;
20135 }); // filter out any playlists that have been excluded due to
20136 // incompatible configurations
20137
20138 sortedPlaylistReps = sortedPlaylistReps.filter(function (rep) {
20139 return !Playlist.isIncompatible(rep.playlist);
20140 }); // filter out any playlists that have been disabled manually through the representations
20141 // api or blacklisted temporarily due to playback errors.
20142
20143 var enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
20144 return Playlist.isEnabled(rep.playlist);
20145 });
20146
20147 if (!enabledPlaylistReps.length) {
20148 // if there are no enabled playlists, then they have all been blacklisted or disabled
20149 // by the user through the representations api. In this case, ignore blacklisting and
20150 // fallback to what the user wants by using playlists the user has not disabled.
20151 enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
20152 return !Playlist.isDisabled(rep.playlist);
20153 });
20154 } // filter out any variant that has greater effective bitrate
20155 // than the current estimated bandwidth
20156
20157
20158 var bandwidthPlaylistReps = enabledPlaylistReps.filter(function (rep) {
20159 return rep.bandwidth * Config.BANDWIDTH_VARIANCE < playerBandwidth;
20160 });
20161 var highestRemainingBandwidthRep = bandwidthPlaylistReps[bandwidthPlaylistReps.length - 1]; // get all of the renditions with the same (highest) bandwidth
20162 // and then take the very first element
20163
20164 var bandwidthBestRep = bandwidthPlaylistReps.filter(function (rep) {
20165 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
20166 })[0]; // if we're not going to limit renditions by player size, make an early decision.
20167
20168 if (limitRenditionByPlayerDimensions === false) {
20169 var _chosenRep = bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];
20170
20171 if (_chosenRep && _chosenRep.playlist) {
20172 var type = 'sortedPlaylistReps';
20173
20174 if (bandwidthBestRep) {
20175 type = 'bandwidthBestRep';
20176 }
20177
20178 if (enabledPlaylistReps[0]) {
20179 type = 'enabledPlaylistReps';
20180 }
20181
20182 logFn("choosing " + representationToString(_chosenRep) + " using " + type + " with options", options);
20183 return _chosenRep.playlist;
20184 }
20185
20186 logFn('could not choose a playlist with options', options);
20187 return null;
20188 } // filter out playlists without resolution information
20189
20190
20191 var haveResolution = bandwidthPlaylistReps.filter(function (rep) {
20192 return rep.width && rep.height;
20193 }); // sort variants by resolution
20194
20195 stableSort(haveResolution, function (left, right) {
20196 return left.width - right.width;
20197 }); // if we have the exact resolution as the player use it
20198
20199 var resolutionBestRepList = haveResolution.filter(function (rep) {
20200 return rep.width === playerWidth && rep.height === playerHeight;
20201 });
20202 highestRemainingBandwidthRep = resolutionBestRepList[resolutionBestRepList.length - 1]; // ensure that we pick the highest bandwidth variant that has the exact resolution
20203
20204 var resolutionBestRep = resolutionBestRepList.filter(function (rep) {
20205 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
20206 })[0];
20207 var resolutionPlusOneList;
20208 var resolutionPlusOneSmallest;
20209 var resolutionPlusOneRep; // find the smallest variant that is larger than the player
20210 // if there is no match of exact resolution
20211
20212 if (!resolutionBestRep) {
20213 resolutionPlusOneList = haveResolution.filter(function (rep) {
20214 return rep.width > playerWidth || rep.height > playerHeight;
20215 }); // find all the variants that have the same smallest resolution
20216
20217 resolutionPlusOneSmallest = resolutionPlusOneList.filter(function (rep) {
20218 return rep.width === resolutionPlusOneList[0].width && rep.height === resolutionPlusOneList[0].height;
20219 }); // ensure that we also pick the highest bandwidth variant that
20220 // is just-larger-than the video player
20221
20222 highestRemainingBandwidthRep = resolutionPlusOneSmallest[resolutionPlusOneSmallest.length - 1];
20223 resolutionPlusOneRep = resolutionPlusOneSmallest.filter(function (rep) {
20224 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
20225 })[0];
20226 } // fallback chain of variants
20227
20228
20229 var chosenRep = resolutionPlusOneRep || resolutionBestRep || bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];
20230
20231 if (chosenRep && chosenRep.playlist) {
20232 var _type = 'sortedPlaylistReps';
20233
20234 if (resolutionPlusOneRep) {
20235 _type = 'resolutionPlusOneRep';
20236 } else if (resolutionBestRep) {
20237 _type = 'resolutionBestRep';
20238 } else if (bandwidthBestRep) {
20239 _type = 'bandwidthBestRep';
20240 } else if (enabledPlaylistReps[0]) {
20241 _type = 'enabledPlaylistReps';
20242 }
20243
20244 logFn("choosing " + representationToString(chosenRep) + " using " + _type + " with options", options);
20245 return chosenRep.playlist;
20246 }
20247
20248 logFn('could not choose a playlist with options', options);
20249 return null;
20250 }; // Playlist Selectors
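  /*
   * Illustrative sketch (not part of the bundle): simpleSelector's fallback
   * chain is resolutionPlusOneRep -> resolutionBestRep -> bandwidthBestRep ->
   * enabledPlaylistReps[0] -> sortedPlaylistReps[0]. A direct call, with
   * hypothetical player values:
   *
   *   var chosen = simpleSelector(
   *     master,     // parsed master manifest
   *     5000000,    // measured bandwidth, in bits per second
   *     1280, 720,  // player dimensions, already scaled by devicePixelRatio
   *     true,       // limit renditions by player size
   *     masterPlaylistController
   *   );
   */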
20251
20252 /**
20253 * Chooses the appropriate media playlist based on the most recent
20254 * bandwidth estimate and the player size.
20255 *
20256 * Expects to be called within the context of an instance of VhsHandler
20257 *
20258 * @return {Playlist} the highest bitrate playlist less than the
20259 * currently detected bandwidth, accounting for some amount of
20260 * bandwidth variance
20261 */
20262
20263 var lastBandwidthSelector = function lastBandwidthSelector() {
20264 var pixelRatio = this.useDevicePixelRatio ? window__default['default'].devicePixelRatio || 1 : 1;
20265 return simpleSelector(this.playlists.master, this.systemBandwidth, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions, this.masterPlaylistController_);
20266 };
20267 /**
20268 * Chooses the appropriate media playlist based on an
20269 * exponential-weighted moving average of the bandwidth after
20270 * filtering for player size.
20271 *
20272 * Expects to be called within the context of an instance of VhsHandler
20273 *
20274 * @param {number} decay - a number between 0 and 1. Higher values of
20275 * this parameter will cause previous bandwidth estimates to lose
20276 * significance more quickly.
20277 * @return {Function} a function which can be invoked to create a new
20278 * playlist selector function.
20279 * @see https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
20280 */
20281
20282 var movingAverageBandwidthSelector = function movingAverageBandwidthSelector(decay) {
20283 var average = -1;
20284
20285 if (decay < 0 || decay > 1) {
20286 throw new Error('Moving average bandwidth decay must be between 0 and 1.');
20287 }
20288
20289 return function () {
20290 var pixelRatio = this.useDevicePixelRatio ? window__default['default'].devicePixelRatio || 1 : 1;
20291
20292 if (average < 0) {
20293 average = this.systemBandwidth;
20294 }
20295
20296 average = decay * this.systemBandwidth + (1 - decay) * average;
20297 return simpleSelector(this.playlists.master, average, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions, this.masterPlaylistController_);
20298 };
20299 };
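  /*
   * Illustrative sketch (not part of the bundle): the selector keeps the
   * running estimate average = decay * current + (1 - decay) * average.
   * Worked numbers with decay = 0.5, seeded from a first measurement of
   * 4 Mbps:
   *
   *   var decay = 0.5;
   *   var average = 4e6;                             // initial systemBandwidth
   *   average = decay * 8e6 + (1 - decay) * average; // 6e6 after an 8 Mbps sample
   *   average = decay * 2e6 + (1 - decay) * average; // 4e6 after a 2 Mbps sample
   */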
20300 /**
20301 * Chooses the appropriate media playlist based on the potential to rebuffer
20302 *
20303 * @param {Object} settings
20304 * Object of information required to use this selector
20305 * @param {Object} settings.master
20306 * Object representation of the master manifest
20307 * @param {number} settings.currentTime
20308 * The current time of the player
20309 * @param {number} settings.bandwidth
20310 * Current measured bandwidth
20311 * @param {number} settings.duration
20312 * Duration of the media
20313 * @param {number} settings.segmentDuration
20314 * Segment duration to be used in round trip time calculations
20315 * @param {number} settings.timeUntilRebuffer
20316 * Time left in seconds until the player has to rebuffer
20317 * @param {number} settings.currentTimeline
20318 * The current timeline segments are being loaded from
20319 * @param {SyncController} settings.syncController
20320 * SyncController for determining if we have a sync point for a given playlist
20321 * @return {Object|null}
20322 * {Object} return.playlist
20323 * The highest bandwidth playlist with the least amount of rebuffering
20324 * {Number} return.rebufferingImpact
20325 * The amount of time in seconds switching to this playlist will rebuffer. A
20326 * negative value means that switching will cause zero rebuffering.
20327 */
20328
20329 var minRebufferMaxBandwidthSelector = function minRebufferMaxBandwidthSelector(settings) {
20330 var master = settings.master,
20331 currentTime = settings.currentTime,
20332 bandwidth = settings.bandwidth,
20333 duration = settings.duration,
20334 segmentDuration = settings.segmentDuration,
20335 timeUntilRebuffer = settings.timeUntilRebuffer,
20336 currentTimeline = settings.currentTimeline,
20337 syncController = settings.syncController; // filter out any playlists that have been excluded due to
20338 // incompatible configurations
20339
20340 var compatiblePlaylists = master.playlists.filter(function (playlist) {
20341 return !Playlist.isIncompatible(playlist);
20342 }); // filter out any playlists that have been disabled manually through the representations
20343 // api or blacklisted temporarily due to playback errors.
20344
20345 var enabledPlaylists = compatiblePlaylists.filter(Playlist.isEnabled);
20346
20347 if (!enabledPlaylists.length) {
20348 // if there are no enabled playlists, then they have all been blacklisted or disabled
20349 // by the user through the representations api. In this case, ignore blacklisting and
20350 // fallback to what the user wants by using playlists the user has not disabled.
20351 enabledPlaylists = compatiblePlaylists.filter(function (playlist) {
20352 return !Playlist.isDisabled(playlist);
20353 });
20354 }
20355
20356 var bandwidthPlaylists = enabledPlaylists.filter(Playlist.hasAttribute.bind(null, 'BANDWIDTH'));
20357 var rebufferingEstimates = bandwidthPlaylists.map(function (playlist) {
20358 var syncPoint = syncController.getSyncPoint(playlist, duration, currentTimeline, currentTime); // If there is no sync point for this playlist, switching to it will require a
20359 // sync request first. This will double the request time
20360
20361 var numRequests = syncPoint ? 1 : 2;
20362 var requestTimeEstimate = Playlist.estimateSegmentRequestTime(segmentDuration, bandwidth, playlist);
20363 var rebufferingImpact = requestTimeEstimate * numRequests - timeUntilRebuffer;
20364 return {
20365 playlist: playlist,
20366 rebufferingImpact: rebufferingImpact
20367 };
20368 });
20369 var noRebufferingPlaylists = rebufferingEstimates.filter(function (estimate) {
20370 return estimate.rebufferingImpact <= 0;
20371 }); // Sort by bandwidth DESC
20372
20373 stableSort(noRebufferingPlaylists, function (a, b) {
20374 return comparePlaylistBandwidth(b.playlist, a.playlist);
20375 });
20376
20377 if (noRebufferingPlaylists.length) {
20378 return noRebufferingPlaylists[0];
20379 }
20380
20381 stableSort(rebufferingEstimates, function (a, b) {
20382 return a.rebufferingImpact - b.rebufferingImpact;
20383 });
20384 return rebufferingEstimates[0] || null;
20385 };
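  /*
   * Illustrative sketch (not part of the bundle): rebufferingImpact is
   * requestTimeEstimate * numRequests - timeUntilRebuffer, where numRequests
   * is 2 when a sync request is needed before the switch. Worked numbers:
   *
   *   var requestTimeEstimate = 1.5; // seconds to fetch one segment
   *   var timeUntilRebuffer = 4;     // seconds of buffer remaining
   *
   *   // with a sync point:    1.5 * 1 - 4 = -2.5 (no rebuffering expected)
   *   // without a sync point: 1.5 * 2 - 4 = -1   (still safe, but closer)
   */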
20386 /**
20387 * Chooses the appropriate media playlist, which in this case is the lowest bitrate
20388 * one with video. If no renditions with video exist, return the lowest audio rendition.
20389 *
20390 * Expects to be called within the context of an instance of VhsHandler
20391 *
20392 * @return {Object|null}
20393 * {Object} return.playlist
20394 * The lowest bitrate playlist that contains a video codec. If no such rendition
20395 * exists pick the lowest audio rendition.
20396 */
20397
20398 var lowestBitrateCompatibleVariantSelector = function lowestBitrateCompatibleVariantSelector() {
20399 var _this = this;
20400
20401 // filter out any playlists that have been excluded due to
20402 // incompatible configurations or playback errors
20403 var playlists = this.playlists.master.playlists.filter(Playlist.isEnabled); // Sort ascending by bitrate
20404
20405 stableSort(playlists, function (a, b) {
20406 return comparePlaylistBandwidth(a, b);
20407 }); // Parse and assume that playlists with no video codec have no video
20408 // (this is not necessarily true, although it is generally true).
20409 //
20410 // If an entire manifest has no valid videos everything will get filtered
20411 // out.
20412
20413 var playlistsWithVideo = playlists.filter(function (playlist) {
20414 return !!codecsForPlaylist(_this.playlists.master, playlist).video;
20415 });
20416 return playlistsWithVideo[0] || null;
20417 };
20418
20419 /**
20420 * @file text-tracks.js
20421 */
20422 /**
20423 * Create captions text tracks on video.js if they do not exist
20424 *
20425 * @param {Object} inbandTextTracks a reference to current inbandTextTracks
20426 * @param {Object} tech the video.js tech
20427 * @param {Object} captionStream the caption stream to create
20428 * @private
20429 */
20430
20431 var createCaptionsTrackIfNotExists = function createCaptionsTrackIfNotExists(inbandTextTracks, tech, captionStream) {
20432 if (!inbandTextTracks[captionStream]) {
20433 tech.trigger({
20434 type: 'usage',
20435 name: 'vhs-608'
20436 });
20437 tech.trigger({
20438 type: 'usage',
20439 name: 'hls-608'
20440 });
20441 var track = tech.textTracks().getTrackById(captionStream);
20442
20443 if (track) {
20444 // Reuse an existing track with a CC# id because this was
20445 // very likely created by videojs-contrib-hls from information
20446 // in the m3u8 for us to use
20447 inbandTextTracks[captionStream] = track;
20448 } else {
20449 // Otherwise, create a track with the default `CC#` label and
20450 // without a language
20451 inbandTextTracks[captionStream] = tech.addRemoteTextTrack({
20452 kind: 'captions',
20453 id: captionStream,
20454 label: captionStream
20455 }, false).track;
20456 }
20457 }
20458 };
20459 /**
20460 * Add caption text track data to a source handler given an array of captions
20461 *
20462 * @param {Object} options an object containing:
20463 * @param {Object} inbandTextTracks the inband text tracks
20464 * @param {number} timestampOffset the timestamp offset of the source buffer
20465 * @param {Array} captionArray an array of caption data
20466 * @private
20467 */
20468
20469 var addCaptionData = function addCaptionData(_ref) {
20470 var inbandTextTracks = _ref.inbandTextTracks,
20471 captionArray = _ref.captionArray,
20472 timestampOffset = _ref.timestampOffset;
20473
20474 if (!captionArray) {
20475 return;
20476 }
20477
20478 var Cue = window__default['default'].WebKitDataCue || window__default['default'].VTTCue;
20479 captionArray.forEach(function (caption) {
20480 var track = caption.stream;
20481 inbandTextTracks[track].addCue(new Cue(caption.startTime + timestampOffset, caption.endTime + timestampOffset, caption.text));
20482 });
20483 };
20484 /**
20485 * Define properties on a cue for backwards compatibility,
20486 * but warn the user that the way that they are using it
20487 * is deprecated and will be removed at a later date.
20488 *
20489 * @param {Cue} cue the cue to add the properties on
20490 * @private
20491 */
20492
20493 var deprecateOldCue = function deprecateOldCue(cue) {
20494 Object.defineProperties(cue.frame, {
20495 id: {
20496 get: function get() {
20497 videojs__default['default'].log.warn('cue.frame.id is deprecated. Use cue.value.key instead.');
20498 return cue.value.key;
20499 }
20500 },
20501 value: {
20502 get: function get() {
20503 videojs__default['default'].log.warn('cue.frame.value is deprecated. Use cue.value.data instead.');
20504 return cue.value.data;
20505 }
20506 },
20507 privateData: {
20508 get: function get() {
20509 videojs__default['default'].log.warn('cue.frame.privateData is deprecated. Use cue.value.data instead.');
20510 return cue.value.data;
20511 }
20512 }
20513 });
20514 };
20515 /**
20516 * Add metadata text track data to a source handler given an array of metadata
20517 *
20518 * @param {Object} options an object containing:
20519 * @param {Object} inbandTextTracks the inband text tracks
20520 * @param {Array} metadataArray an array of meta data
20521 * @param {number} timestampOffset the timestamp offset of the source buffer
20522 * @param {number} videoDuration the duration of the video
20523 * @private
20524 */
20525
20526
20527 var addMetadata = function addMetadata(_ref2) {
20528 var inbandTextTracks = _ref2.inbandTextTracks,
20529 metadataArray = _ref2.metadataArray,
20530 timestampOffset = _ref2.timestampOffset,
20531 videoDuration = _ref2.videoDuration;
20532
20533 if (!metadataArray) {
20534 return;
20535 }
20536
20537 var Cue = window__default['default'].WebKitDataCue || window__default['default'].VTTCue;
20538 var metadataTrack = inbandTextTracks.metadataTrack_;
20539
20540 if (!metadataTrack) {
20541 return;
20542 }
20543
20544 metadataArray.forEach(function (metadata) {
20545 var time = metadata.cueTime + timestampOffset; // if time isn't a finite number between 0 and Infinity, like NaN,
20546 // ignore this bit of metadata.
20547 // This likely occurs when you have a non-timed ID3 tag like TIT2,
20548 // which is the "Title/Songname/Content description" frame
20549
20550 if (typeof time !== 'number' || window__default['default'].isNaN(time) || time < 0 || !(time < Infinity)) {
20551 return;
20552 }
20553
20554 metadata.frames.forEach(function (frame) {
20555 var cue = new Cue(time, time, frame.value || frame.url || frame.data || '');
20556 cue.frame = frame;
20557 cue.value = frame;
20558 deprecateOldCue(cue);
20559 metadataTrack.addCue(cue);
20560 });
20561 });
20562
20563 if (!metadataTrack.cues || !metadataTrack.cues.length) {
20564 return;
20565 } // Updating the metadata cues so that
20566 // the endTime of each cue is the startTime of the next cue
20567 // the endTime of last cue is the duration of the video
20568
20569
20570 var cues = metadataTrack.cues;
20571 var cuesArray = []; // Create a copy of the TextTrackCueList...
20572 // ...disregarding cues with a falsy value
20573
20574 for (var i = 0; i < cues.length; i++) {
20575 if (cues[i]) {
20576 cuesArray.push(cues[i]);
20577 }
20578 } // Group cues by their startTime value
20579
20580
20581 var cuesGroupedByStartTime = cuesArray.reduce(function (obj, cue) {
20582 var timeSlot = obj[cue.startTime] || [];
20583 timeSlot.push(cue);
20584 obj[cue.startTime] = timeSlot;
20585 return obj;
20586 }, {}); // Sort startTimes by ascending order
20587
20588 var sortedStartTimes = Object.keys(cuesGroupedByStartTime).sort(function (a, b) {
20589 return Number(a) - Number(b);
20590 }); // Map each cue group's endTime to the next group's startTime
20591
20592 sortedStartTimes.forEach(function (startTime, idx) {
20593 var cueGroup = cuesGroupedByStartTime[startTime];
20594 var nextTime = Number(sortedStartTimes[idx + 1]) || videoDuration; // Map each cue's endTime to the next group's startTime
20595
20596 cueGroup.forEach(function (cue) {
20597 cue.endTime = nextTime;
20598 });
20599 });
20600 };
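// Worked sketch (assumed tag shapes, not part of the library): two ID3 tags
// at cue times 0 and 5 in a 30 second video produce zero-duration cues that
// the pass above then stretches, so each group ends where the next begins and
// the last group ends at the video duration.
//
//   addMetadata({
//     inbandTextTracks: inbandTextTracks, // metadataTrack_ must already exist
//     timestampOffset: 0,
//     videoDuration: 30,
//     metadataArray: [
//       { cueTime: 0, frames: [{ key: 'TIT2', data: 'Intro' }] },
//       { cueTime: 5, frames: [{ key: 'TIT2', data: 'Verse' }] }
//     ]
//   });
//   // => cues on metadataTrack_: [0, 5] 'Intro' and [5, 30] 'Verse'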
20601 /**
20602 * Create metadata text track on video.js if it does not exist
20603 *
20604 * @param {Object} inbandTextTracks a reference to current inbandTextTracks
20605 * @param {string} dispatchType the inband metadata track dispatch type
20606 * @param {Object} tech the video.js tech
20607 * @private
20608 */
20609
20610 var createMetadataTrackIfNotExists = function createMetadataTrackIfNotExists(inbandTextTracks, dispatchType, tech) {
20611 if (inbandTextTracks.metadataTrack_) {
20612 return;
20613 }
20614
20615 inbandTextTracks.metadataTrack_ = tech.addRemoteTextTrack({
20616 kind: 'metadata',
20617 label: 'Timed Metadata'
20618 }, false).track;
20619 inbandTextTracks.metadataTrack_.inBandMetadataTrackDispatchType = dispatchType;
20620 };
20621 /**
20622 * Remove cues from a track on video.js.
20623 *
20624 * @param {Double} start start of where we should remove the cue
20625 * @param {Double} end end of where we should remove the cue
20626 * @param {Object} track the text track to remove the cues from
20627 * @private
20628 */
20629
20630 var removeCuesFromTrack = function removeCuesFromTrack(start, end, track) {
20631 var i;
20632 var cue;
20633
20634 if (!track) {
20635 return;
20636 }
20637
20638 if (!track.cues) {
20639 return;
20640 }
20641
20642 i = track.cues.length;
20643
20644 while (i--) {
20645 cue = track.cues[i]; // Remove any cue within the provided start and end time
20646
20647 if (cue.startTime >= start && cue.endTime <= end) {
20648 track.removeCue(cue);
20649 }
20650 }
20651 };
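// Worked sketch (not part of the library): only cues contained entirely
// within [start, end] are removed; cues straddling either boundary survive.
//
//   // track cues: [0, 2], [3, 9], [8, 12]
//   removeCuesFromTrack(2, 10, track);
//   // => removes [3, 9]; keeps [0, 2] (starts before 2) and [8, 12] (ends after 10)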
20652 /**
20653 * Remove duplicate cues from a track on video.js (a cue is considered a
20654 * duplicate if it has the same time interval and text as another)
20655 *
20656 * @param {Object} track the text track to remove the duplicate cues from
20657 * @private
20658 */
20659
20660 var removeDuplicateCuesFromTrack = function removeDuplicateCuesFromTrack(track) {
20661 var cues = track.cues;
20662
20663 if (!cues) {
20664 return;
20665 }
20666
20667 for (var i = 0; i < cues.length; i++) {
20668 var duplicates = [];
20669 var occurrences = 0;
20670
20671 for (var j = 0; j < cues.length; j++) {
20672 if (cues[i].startTime === cues[j].startTime && cues[i].endTime === cues[j].endTime && cues[i].text === cues[j].text) {
20673 occurrences++;
20674
20675 if (occurrences > 1) {
20676 duplicates.push(cues[j]);
20677 }
20678 }
20679 }
20680
20681 if (duplicates.length) {
20682 duplicates.forEach(function (dupe) {
20683 return track.removeCue(dupe);
20684 });
20685 }
20686 }
20687 };
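// Illustrative sketch (not part of the library): duplicates are matched on
// startTime, endTime and text together; the first occurrence is kept.
//
//   // track cues: [0, 1] 'a', [0, 1] 'a', [0, 1] 'b'
//   removeDuplicateCuesFromTrack(track);
//   // => one [0, 1] 'a' remains alongside [0, 1] 'b'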
20688
20689 /**
20690 * Returns a list of gops in the buffer that have a pts value of 3 seconds or more in
20691 * front of the current time.
20692 *
20693 * @param {Array} buffer
20694 * The current buffer of gop information
20695 * @param {number} currentTime
20696 * The current time
20697 * @param {Double} mapping
20698 * Offset to map display time to stream presentation time
20699 * @return {Array}
20700 * List of gops considered safe to append over
20701 */
20702
20703 var gopsSafeToAlignWith = function gopsSafeToAlignWith(buffer, currentTime, mapping) {
20704 if (typeof currentTime === 'undefined' || currentTime === null || !buffer.length) {
20705 return [];
20706 } // pts value for current time + 3 seconds to give a bit more wiggle room
20707
20708
20709 var currentTimePts = Math.ceil((currentTime - mapping + 3) * clock.ONE_SECOND_IN_TS);
20710 var i;
20711
20712 for (i = 0; i < buffer.length; i++) {
20713 if (buffer[i].pts > currentTimePts) {
20714 break;
20715 }
20716 }
20717
20718 return buffer.slice(i);
20719 };
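// Worked sketch (not part of the library): with a 90kHz clock
// (clock.ONE_SECOND_IN_TS === 90000) and a mapping of 0, a currentTime of 10
// gives a cutoff of (10 + 3) * 90000 === 1170000, so only gops strictly past
// ~13 seconds are returned.
//
//   gopsSafeToAlignWith([{ pts: 1080000 }, { pts: 1260000 }], 10, 0);
//   // => [{ pts: 1260000 }] (the ~12s gop is dropped, the ~14s gop kept)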
20720 /**
20721 * Appends gop information (timing and byteLength) received by the transmuxer for the
20722 * gops appended in the last call to appendBuffer
20723 *
20724 * @param {Array} buffer
20725 * The current buffer of gop information
20726 * @param {Array} gops
20727 * List of new gop information
20728 * @param {boolean} replace
20729 * If true, replace the buffer with the new gop information. If false, append the
20730 * new gop information to the buffer in the right location of time.
20731 * @return {Array}
20732 * Updated list of gop information
20733 */
20734
20735 var updateGopBuffer = function updateGopBuffer(buffer, gops, replace) {
20736 if (!gops.length) {
20737 return buffer;
20738 }
20739
20740 if (replace) {
20741 // If we are in safe append mode, then completely overwrite the gop buffer
20742 // with the most recent appended data. This will make sure that when appending
20743 // future segments, we only try to align with gops that are both ahead of current
20744 // time and in the last segment appended.
20745 return gops.slice();
20746 }
20747
20748 var start = gops[0].pts;
20749 var i = 0;
20750
20751 for (i; i < buffer.length; i++) {
20752 if (buffer[i].pts >= start) {
20753 break;
20754 }
20755 }
20756
20757 return buffer.slice(0, i).concat(gops);
20758 };
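// Worked sketch (not part of the library): in append mode (replace === false)
// buffered gop info at or after the first new gop's pts is dropped before the
// new gops are concatenated; in replace mode the new list wins outright.
//
//   var gopBuffer = [{ pts: 10 }, { pts: 20 }, { pts: 30 }];
//   updateGopBuffer(gopBuffer, [{ pts: 25 }, { pts: 35 }], false);
//   // => [{ pts: 10 }, { pts: 20 }, { pts: 25 }, { pts: 35 }]
//   updateGopBuffer(gopBuffer, [{ pts: 25 }, { pts: 35 }], true);
//   // => [{ pts: 25 }, { pts: 35 }]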
20759 /**
20760 * Removes gop information in buffer that overlaps with provided start and end
20761 *
20762 * @param {Array} buffer
20763 * The current buffer of gop information
20764 * @param {Double} start
20765 * position to start the remove at
20766 * @param {Double} end
20767 * position to end the remove at
20768 * @param {Double} mapping
20769 * Offset to map display time to stream presentation time
20770 */
20771
20772 var removeGopBuffer = function removeGopBuffer(buffer, start, end, mapping) {
20773 var startPts = Math.ceil((start - mapping) * clock.ONE_SECOND_IN_TS);
20774 var endPts = Math.ceil((end - mapping) * clock.ONE_SECOND_IN_TS);
20775 var updatedBuffer = buffer.slice();
20776 var i = buffer.length;
20777
20778 while (i--) {
20779 if (buffer[i].pts <= endPts) {
20780 break;
20781 }
20782 }
20783
20784 if (i === -1) {
20785 // no removal because end of remove range is before start of buffer
20786 return updatedBuffer;
20787 }
20788
20789 var j = i + 1;
20790
20791 while (j--) {
20792 if (buffer[j].pts <= startPts) {
20793 break;
20794 }
20795 } // clamp remove range start to 0 index
20796
20797
20798 j = Math.max(j, 0);
20799 updatedBuffer.splice(j, i - j + 1);
20800 return updatedBuffer;
20801 };
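// Worked sketch (not part of the library): with mapping 0, removing display
// times 2-4 drops the gops whose pts (in 90kHz ticks) fall within that range;
// a new array is returned and the input buffer is left unmodified.
//
//   var gopBuffer = [90000, 180000, 270000, 360000, 450000] // ~1s..5s
//     .map(function (pts) { return { pts: pts }; });
//   removeGopBuffer(gopBuffer, 2, 4, 0);
//   // => only the ~1s and ~5s gops remain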
20802
20803 var shallowEqual = function shallowEqual(a, b) {
20804 // if both are undefined
20805 // or one or the other is undefined
20806 // they are not equal
20807 if (!a && !b || !a && b || a && !b) {
20808 return false;
20809 } // they are the same object and thus, equal
20810
20811
20812 if (a === b) {
20813 return true;
20814 } // sort keys so we can make sure they have
20815 // all the same keys later.
20816
20817
20818 var akeys = Object.keys(a).sort();
20819 var bkeys = Object.keys(b).sort(); // different number of keys, not equal
20820
20821 if (akeys.length !== bkeys.length) {
20822 return false;
20823 }
20824
20825 for (var i = 0; i < akeys.length; i++) {
20826 var key = akeys[i]; // different sorted keys, not equal
20827
20828 if (key !== bkeys[i]) {
20829 return false;
20830 } // different values, not equal
20831
20832
20833 if (a[key] !== b[key]) {
20834 return false;
20835 }
20836 }
20837
20838 return true;
20839 };
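// Illustrative sketch (not part of the library): key order is irrelevant, but
// values are compared with strict equality, so nested objects must share a
// reference, and two missing objects are not considered equal.
//
//   shallowEqual({ a: 1, b: 2 }, { b: 2, a: 1 }); // => true
//   shallowEqual({ a: {} }, { a: {} });           // => false (different refs)
//   shallowEqual(undefined, undefined);           // => false (by design)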
20840
20841 // https://www.w3.org/TR/WebIDL-1/#quotaexceedederror
20842 var QUOTA_EXCEEDED_ERR = 22;
20843
20844 // This number was arbitrarily chosen and may be updated in the future, but seemed reasonable
20845 // as a start to prevent any potential issues with removing content too close to the
20846 // playhead.
20847
20848 var MIN_BACK_BUFFER = 1; // in seconds
20849
20850 var CHECK_BUFFER_DELAY = 500; // in ms
20851
20852 var finite = function finite(num) {
20853 return typeof num === 'number' && isFinite(num);
20854 }; // With most content hovering around 30fps, if a segment has a duration less than a half
20855 // frame at 30fps or one frame at 60fps, the bandwidth and throughput calculations will
20856 // not accurately reflect the rest of the content.
20857
20858
20859 var MIN_SEGMENT_DURATION_TO_SAVE_STATS = 1 / 60;
20860 var illegalMediaSwitch = function illegalMediaSwitch(loaderType, startingMedia, trackInfo) {
20861 // Although these checks should most likely cover non 'main' types, for now this
20862 // narrows the scope of our checks.
20863 if (loaderType !== 'main' || !startingMedia || !trackInfo) {
20864 return null;
20865 }
20866
20867 if (!trackInfo.hasAudio && !trackInfo.hasVideo) {
20868 return 'Neither audio nor video found in segment.';
20869 }
20870
20871 if (startingMedia.hasVideo && !trackInfo.hasVideo) {
20872 return 'Only audio found in segment when we expected video.' + ' We can\'t switch to audio only from a stream that had video.' + ' To get rid of this message, please add codec information to the manifest.';
20873 }
20874
20875 if (!startingMedia.hasVideo && trackInfo.hasVideo) {
20876 return 'Video found in segment when we expected only audio.' + ' We can\'t switch to a stream with video from an audio only stream.' + ' To get rid of this message, please add codec information to the manifest.';
20877 }
20878
20879 return null;
20880 };
20881 /**
20882 * Calculates a time value that is safe to remove from the back buffer without interrupting
20883 * playback.
20884 *
20885 * @param {TimeRange} seekable
20886 * The current seekable range
20887 * @param {number} currentTime
20888 * The current time of the player
20889 * @param {number} targetDuration
20890 * The target duration of the current playlist
20891 * @return {number}
20892 * Time that is safe to remove from the back buffer without interrupting playback
20893 */
20894
20895 var safeBackBufferTrimTime = function safeBackBufferTrimTime(seekable, currentTime, targetDuration) {
20896 // 30 seconds before the playhead provides a safe default for trimming.
20897 //
20898 // Choosing a reasonable default is particularly important for high bitrate content and
20899 // VOD videos/live streams with large windows, as the buffer may end up overfilled and
20900 // throw an APPEND_BUFFER_ERR.
20901 var trimTime = currentTime - Config.BACK_BUFFER_LENGTH;
20902
20903 if (seekable.length) {
20904 // Some live playlists may have a shorter window of content than the full allowed back
20905 // buffer. For these playlists, don't save content that's no longer within the window.
20906 trimTime = Math.max(trimTime, seekable.start(0));
20907 } // Don't remove within target duration of the current time to avoid the possibility of
20908 // removing the GOP currently being played, as removing it can cause playback stalls.
20909
20910
20911 var maxTrimTime = currentTime - targetDuration;
20912 return Math.min(maxTrimTime, trimTime);
20913 };
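// Worked sketch (not part of the library), assuming the default
// Config.BACK_BUFFER_LENGTH of 30 seconds: at currentTime 70 with a live
// window starting at 50 and a target duration of 10, the 30 second rule
// yields 40, the seekable start raises that to 50, and the target-duration
// cap of 60 (70 - 10) leaves 50 as the safe trim time.
//
//   safeBackBufferTrimTime(videojs.createTimeRanges([[50, 100]]), 70, 10);
//   // => 50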
20914
20915 var segmentInfoString = function segmentInfoString(segmentInfo) {
20916 var _segmentInfo$segment = segmentInfo.segment,
20917 start = _segmentInfo$segment.start,
20918 end = _segmentInfo$segment.end,
20919 parts = _segmentInfo$segment.parts,
20920 _segmentInfo$playlist = segmentInfo.playlist,
20921 seq = _segmentInfo$playlist.mediaSequence,
20922 id = _segmentInfo$playlist.id,
20923 _segmentInfo$playlist2 = _segmentInfo$playlist.segments,
20924 segments = _segmentInfo$playlist2 === void 0 ? [] : _segmentInfo$playlist2,
20925 index = segmentInfo.mediaIndex,
20926 partIndex = segmentInfo.partIndex,
20927 timeline = segmentInfo.timeline;
20928 var name = segmentInfo.segment.uri ? 'segment' : 'pre-segment';
20929 return [name + " [" + index + "/" + (segments.length - 1) + "]", partIndex ? "part [" + partIndex + "/" + (parts.length - 1) + "]" : '', "mediaSequenceNumber [" + seq + "/" + (seq + segments.length - 1) + "]", "playlist [" + id + "]", "start/end [" + start + " => " + end + "]", "timeline [" + timeline + "]"].join(' ');
20930 };
20931
20932 var timingInfoPropertyForMedia = function timingInfoPropertyForMedia(mediaType) {
20933 return mediaType + "TimingInfo";
20934 };
20935 /**
20936 * Returns the timestamp offset to use for the segment.
20937 *
20938 * @param {number} segmentTimeline
20939 * The timeline of the segment
20940 * @param {number} currentTimeline
20941 * The timeline currently being followed by the loader
20942 * @param {number} startOfSegment
20943 * The estimated segment start
20944 * @param {TimeRange[]} buffered
20945 * The loader's buffer
20946 * @param {boolean} overrideCheck
20947 * If true, no checks are made to see if the timestamp offset value should be set,
20948 * but sets it directly to a value.
20949 *
20950 * @return {number|null}
20951 * Either a number representing a new timestamp offset, or null if the segment is
20952 * part of the same timeline
20953 */
20954
20955
20956 var timestampOffsetForSegment = function timestampOffsetForSegment(_ref) {
20957 var segmentTimeline = _ref.segmentTimeline,
20958 currentTimeline = _ref.currentTimeline,
20959 startOfSegment = _ref.startOfSegment,
20960 buffered = _ref.buffered,
20961 overrideCheck = _ref.overrideCheck;
20962
20963 // Check to see if we are crossing a discontinuity to see if we need to set the
20964 // timestamp offset on the transmuxer and source buffer.
20965 //
20966 // Previously, we changed the timestampOffset if the start of this segment was less than
20967 // the currently set timestampOffset, but this isn't desirable as it can produce bad
20968 // behavior, especially around long running live streams.
20969 if (!overrideCheck && segmentTimeline === currentTimeline) {
20970 return null;
20971 } // When changing renditions, it's possible to request a segment on an older timeline. For
20972 // instance, given two renditions with the following:
20973 //
20974 // #EXTINF:10
20975 // segment1
20976 // #EXT-X-DISCONTINUITY
20977 // #EXTINF:10
20978 // segment2
20979 // #EXTINF:10
20980 // segment3
20981 //
20982 // And the current player state:
20983 //
20984 // current time: 8
20985 // buffer: 0 => 20
20986 //
20987 // The next segment on the current rendition would be segment3, filling the buffer from
20988 // 20s onwards. However, if a rendition switch happens after segment2 was requested,
20989 // then the next segment to be requested will be segment1 from the new rendition in
20990 // order to fill time 8 and onwards. Using the buffered end would result in repeated
20991 // content (since it would position segment1 of the new rendition starting at 20s). This
20992 // case can be identified when the new segment's timeline is a prior value. Instead of
20993 // using the buffered end, the startOfSegment can be used, which, hopefully, will be
20994 // more accurate to the actual start time of the segment.
20995
20996
20997 if (segmentTimeline < currentTimeline) {
20998 return startOfSegment;
20999 } // segmentInfo.startOfSegment used to be used as the timestamp offset, however, that
21000 // value uses the end of the last segment if it is available. While this value
21001 // should often be correct, it's better to rely on the buffered end, as the new
21002 // content post discontinuity should line up with the buffered end as if it were
21003 // time 0 for the new content.
21004
21005
21006 return buffered.length ? buffered.end(buffered.length - 1) : startOfSegment;
21007 };
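// Illustrative sketch (not part of the library): crossing from timeline 0 to
// timeline 1 with [0, 20] buffered uses the buffered end as the new offset;
// the same call with segmentTimeline 0 (and no override) returns null.
//
//   timestampOffsetForSegment({
//     segmentTimeline: 1,
//     currentTimeline: 0,
//     startOfSegment: 19.9,
//     buffered: videojs.createTimeRanges([[0, 20]]),
//     overrideCheck: false
//   });
//   // => 20 (the buffered end)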
21008 /**
21009 * Returns whether or not the loader should wait for a timeline change from the timeline
21010 * change controller before processing the segment.
21011 *
21012 * Primary timing in VHS goes by video. This is different from most media players, as
21013 * audio is more often used as the primary timing source. For the foreseeable future, VHS
21014 * will continue to use video as the primary timing source, due to the current logic and
21015 * expectations built around it.
21016
21017 * Since the timing follows video, in order to maintain sync, the video loader is
21018 * responsible for setting both audio and video source buffer timestamp offsets.
21019 *
21020 * Setting different values for audio and video source buffers could lead to
21021 * desyncing. The following examples demonstrate some of the situations where this
21022 * distinction is important. Note that all of these cases involve demuxed content. When
21023 * content is muxed, the audio and video are packaged together, therefore syncing
21024 * separate media playlists is not an issue.
21025 *
21026 * CASE 1: Audio prepares to load a new timeline before video:
21027 *
21028 * Timeline: 0 1
21029 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
21030 * Audio Loader: ^
21031 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
21032 * Video Loader ^
21033 *
21034 * In the above example, the audio loader is preparing to load the 6th segment, the first
21035 * after a discontinuity, while the video loader is still loading the 5th segment, before
21036 * the discontinuity.
21037 *
21038 * If the audio loader goes ahead and loads and appends the 6th segment before the video
21039 * loader crosses the discontinuity, then when appended, the 6th audio segment will use
21040 * the timestamp offset from timeline 0. This will likely lead to desyncing. In addition,
21041 * the audio loader must provide the audioAppendStart value to trim the content in the
21042 * transmuxer, and that value relies on the audio timestamp offset. Since the audio
21043 * timestamp offset is set by the video (main) loader, the audio loader shouldn't load the
21044 * segment until that value is provided.
21045 *
21046 * CASE 2: Video prepares to load a new timeline before audio:
21047 *
21048 * Timeline: 0 1
21049 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
21050 * Audio Loader: ^
21051 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
21052 * Video Loader ^
21053 *
21054 * In the above example, the video loader is preparing to load the 6th segment, the first
21055 * after a discontinuity, while the audio loader is still loading the 5th segment, before
21056 * the discontinuity.
21057 *
21058 * If the video loader goes ahead and loads and appends the 6th segment, then once the
21059 * segment is loaded and processed, both the video and audio timestamp offsets will be
21060 * set, since video is used as the primary timing source. This is to ensure content lines
21061 * up appropriately, as any modifications to the video timing are reflected by audio when
21062 * the video loader sets the audio and video timestamp offsets to the same value. However,
21063 * setting the timestamp offset for audio before audio has had a chance to change
21064 * timelines will likely lead to desyncing, as the audio loader will append segment 5 with
21065 * a timestamp intended to apply to segments from timeline 1 rather than timeline 0.
21066 *
21067 * CASE 3: When seeking, audio prepares to load a new timeline before video
21068 *
21069 * Timeline: 0 1
21070 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
21071 * Audio Loader: ^
21072 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
21073 * Video Loader ^
21074 *
21075 * In the above example, both audio and video loaders are loading segments from timeline
21076 * 0, but imagine that the seek originated from timeline 1.
21077 *
21078 * When seeking to a new timeline, the timestamp offset will be set based on the expected
21079 * segment start of the loaded video segment. In order to maintain sync, the audio loader
21080 * must wait for the video loader to load its segment and update both the audio and video
21081 * timestamp offsets before it may load and append its own segment. This is the case
21082 * whether the seek results in a mismatched segment request (e.g., the audio loader
21083 * chooses to load segment 3 and the video loader chooses to load segment 4) or the
21084 * loaders choose to load the same segment index from each playlist, as the segments may
21085 * not be aligned perfectly, even for matching segment indexes.
21086 *
21087 * @param {Object} timelineChangeController
21088 * @param {number} currentTimeline
21089 * The timeline currently being followed by the loader
21090 * @param {number} segmentTimeline
21091 * The timeline of the segment being loaded
21092 * @param {('main'|'audio')} loaderType
21093 * The loader type
21094 * @param {boolean} audioDisabled
21095 * Whether the audio is disabled for the loader. This should only be true when the
21096 * loader may have muxed audio in its segment, but should not append it, e.g., for
21097 * the main loader when an alternate audio playlist is active.
21098 *
21099 * @return {boolean}
21100 * Whether the loader should wait for a timeline change from the timeline change
21101 * controller before processing the segment
21102 */
21103
21104 var shouldWaitForTimelineChange = function shouldWaitForTimelineChange(_ref2) {
21105 var timelineChangeController = _ref2.timelineChangeController,
21106 currentTimeline = _ref2.currentTimeline,
21107 segmentTimeline = _ref2.segmentTimeline,
21108 loaderType = _ref2.loaderType,
21109 audioDisabled = _ref2.audioDisabled;
21110
21111 if (currentTimeline === segmentTimeline) {
21112 return false;
21113 }
21114
21115 if (loaderType === 'audio') {
21116 var lastMainTimelineChange = timelineChangeController.lastTimelineChange({
21117 type: 'main'
21118 }); // Audio loader should wait if:
21119 //
21120 // * main hasn't had a timeline change yet (thus has not loaded its first segment)
21121 // * main hasn't yet changed to the timeline audio is looking to load
21122
21123 return !lastMainTimelineChange || lastMainTimelineChange.to !== segmentTimeline;
21124 } // The main loader only needs to wait for timeline changes if there's demuxed audio.
21125 // Otherwise, there's nothing to wait for, since audio would be muxed into the main
21126 // loader's segments (or the content is audio/video only and handled by the main
21127 // loader).
21128
21129
21130 if (loaderType === 'main' && audioDisabled) {
21131 var pendingAudioTimelineChange = timelineChangeController.pendingTimelineChange({
21132 type: 'audio'
21133 }); // Main loader should wait for the audio loader if audio is not pending a timeline
21134 // change to the current timeline.
21135 //
21136 // Since the main loader is responsible for setting the timestamp offset for both
21137 // audio and video, the main loader must wait for audio to be about to change to its
21138 // timeline before setting the offset, otherwise, if audio is behind in loading,
21139 // segments from the previous timeline would be adjusted by the new timestamp offset.
21140 //
21141 // This requirement means that video will not cross a timeline until the audio is
21142 // about to cross to it, so that audio and video will always cross the timeline
21143 // together.
21144 //
21145 // In addition to normal timeline changes, these rules also apply to the start of a
21146 // stream (going from a non-existent timeline, -1, to timeline 0). It's important
21147 // that these rules apply to the first timeline change because if they did not, it's
21148 // possible that the main loader will cross two timelines before the audio loader has
21149 // crossed one. Logic may be implemented to handle the startup as a special case, but
21150 // it's easier to simply treat all timeline changes the same.
21151
21152 if (pendingAudioTimelineChange && pendingAudioTimelineChange.to === segmentTimeline) {
21153 return false;
21154 }
21155
21156 return true;
21157 }
21158
21159 return false;
21160 };
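// Illustrative sketch (not part of the library, timelineChangeController
// instance assumed): the audio loader holds off on a cross-timeline segment
// until main has already changed to that timeline.
//
//   shouldWaitForTimelineChange({
//     timelineChangeController: timelineChangeController,
//     currentTimeline: 0,
//     segmentTimeline: 1,
//     loaderType: 'audio'
//   });
//   // => true until lastTimelineChange({ type: 'main' }).to === 1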
21161 var mediaDuration = function mediaDuration(audioTimingInfo, videoTimingInfo) {
21162 var audioDuration = audioTimingInfo && typeof audioTimingInfo.start === 'number' && typeof audioTimingInfo.end === 'number' ? audioTimingInfo.end - audioTimingInfo.start : 0;
21163 var videoDuration = videoTimingInfo && typeof videoTimingInfo.start === 'number' && typeof videoTimingInfo.end === 'number' ? videoTimingInfo.end - videoTimingInfo.start : 0;
21164 return Math.max(audioDuration, videoDuration);
21165 };
21166 var segmentTooLong = function segmentTooLong(_ref3) {
21167 var segmentDuration = _ref3.segmentDuration,
21168 maxDuration = _ref3.maxDuration;
21169
21170 // 0 duration segments are most likely due to metadata only segments or a lack of
21171 // information.
21172 if (!segmentDuration) {
21173 return false;
21174 } // For HLS:
21175 //
21176 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1
21177 // The EXTINF duration of each Media Segment in the Playlist
21178 // file, when rounded to the nearest integer, MUST be less than or equal
21179 // to the target duration; longer segments can trigger playback stalls
21180 // or other errors.
21181 //
21182 // For DASH, the mpd-parser uses the largest reported segment duration as the target
21183 // duration. Although that reported duration is occasionally approximate (i.e., not
21184 // exact), a strict check may report that a segment is too long more often in DASH.
21185
21186
21187 return Math.round(segmentDuration) > maxDuration + TIME_FUDGE_FACTOR;
21188 };
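// Worked sketch (not part of the library): the duration is rounded to the
// nearest integer before the comparison, mirroring the HLS spec's rounding
// rule, so with a target duration of 10:
//
//   segmentTooLong({ segmentDuration: 10.4, maxDuration: 10 }); // => false
//   segmentTooLong({ segmentDuration: 10.6, maxDuration: 10 }); // => true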
21189 var getTroublesomeSegmentDurationMessage = function getTroublesomeSegmentDurationMessage(segmentInfo, sourceType) {
21190 // Right now we aren't following DASH's timing model exactly, so only perform
21191 // this check for HLS content.
21192 if (sourceType !== 'hls') {
21193 return null;
21194 }
21195
21196 var segmentDuration = mediaDuration(segmentInfo.audioTimingInfo, segmentInfo.videoTimingInfo); // Don't report if we lack information.
21197 //
21198 // If the segment has a duration of 0 it is either a lack of information or a
21199 // metadata only segment and shouldn't be reported here.
21200
21201 if (!segmentDuration) {
21202 return null;
21203 }
21204
21205 var targetDuration = segmentInfo.playlist.targetDuration;
21206 var isSegmentWayTooLong = segmentTooLong({
21207 segmentDuration: segmentDuration,
21208 maxDuration: targetDuration * 2
21209 });
21210 var isSegmentSlightlyTooLong = segmentTooLong({
21211 segmentDuration: segmentDuration,
21212 maxDuration: targetDuration
21213 });
21214 var segmentTooLongMessage = "Segment with index " + segmentInfo.mediaIndex + " " + ("from playlist " + segmentInfo.playlist.id + " ") + ("has a duration of " + segmentDuration + " ") + ("when the reported duration is " + segmentInfo.duration + " ") + ("and the target duration is " + targetDuration + ". ") + 'For HLS content, a duration in excess of the target duration may result in ' + 'playback issues. See the HLS specification section on EXT-X-TARGETDURATION for ' + 'more details: ' + 'https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1';
21215
21216 if (isSegmentWayTooLong || isSegmentSlightlyTooLong) {
21217 return {
21218 severity: isSegmentWayTooLong ? 'warn' : 'info',
21219 message: segmentTooLongMessage
21220 };
21221 }
21222
21223 return null;
21224 };
21225 /**
21226 * An object that manages segment loading and appending.
21227 *
21228 * @class SegmentLoader
21229 * @param {Object} options required and optional options
21230 * @extends videojs.EventTarget
21231 */
21232
21233 var SegmentLoader = /*#__PURE__*/function (_videojs$EventTarget) {
21234 inheritsLoose(SegmentLoader, _videojs$EventTarget);
21235
21236 function SegmentLoader(settings, options) {
21237 var _this;
21238
21239 _this = _videojs$EventTarget.call(this) || this; // check pre-conditions
21240
21241 if (!settings) {
21242 throw new TypeError('Initialization settings are required');
21243 }
21244
21245 if (typeof settings.currentTime !== 'function') {
21246 throw new TypeError('No currentTime getter specified');
21247 }
21248
21249 if (!settings.mediaSource) {
21250 throw new TypeError('No MediaSource specified');
21251 } // public properties
21252
21253
21254 _this.bandwidth = settings.bandwidth;
21255 _this.throughput = {
21256 rate: 0,
21257 count: 0
21258 };
21259 _this.roundTrip = NaN;
21260
21261 _this.resetStats_();
21262
21263 _this.mediaIndex = null;
21264 _this.partIndex = null; // private settings
21265
21266 _this.hasPlayed_ = settings.hasPlayed;
21267 _this.currentTime_ = settings.currentTime;
21268 _this.seekable_ = settings.seekable;
21269 _this.seeking_ = settings.seeking;
21270 _this.duration_ = settings.duration;
21271 _this.mediaSource_ = settings.mediaSource;
21272 _this.vhs_ = settings.vhs;
21273 _this.loaderType_ = settings.loaderType;
21274 _this.currentMediaInfo_ = void 0;
21275 _this.startingMediaInfo_ = void 0;
21276 _this.segmentMetadataTrack_ = settings.segmentMetadataTrack;
21277 _this.goalBufferLength_ = settings.goalBufferLength;
21278 _this.sourceType_ = settings.sourceType;
21279 _this.sourceUpdater_ = settings.sourceUpdater;
21280 _this.inbandTextTracks_ = settings.inbandTextTracks;
21281 _this.state_ = 'INIT';
21282 _this.handlePartialData_ = settings.handlePartialData;
21283 _this.timelineChangeController_ = settings.timelineChangeController;
21284 _this.shouldSaveSegmentTimingInfo_ = true;
21285 _this.parse708captions_ = settings.parse708captions; // private instance variables
21286
21287 _this.checkBufferTimeout_ = null;
21288 _this.error_ = void 0;
21289 _this.currentTimeline_ = -1;
21290 _this.pendingSegment_ = null;
21291 _this.xhrOptions_ = null;
21292 _this.pendingSegments_ = [];
21293 _this.audioDisabled_ = false;
21294 _this.isPendingTimestampOffset_ = false; // TODO possibly move gopBuffer and timeMapping info to a separate controller
21295
21296 _this.gopBuffer_ = [];
21297 _this.timeMapping_ = 0;
21298 _this.safeAppend_ = videojs__default['default'].browser.IE_VERSION >= 11;
21299 _this.appendInitSegment_ = {
21300 audio: true,
21301 video: true
21302 };
21303 _this.playlistOfLastInitSegment_ = {
21304 audio: null,
21305 video: null
21306 };
21307 _this.callQueue_ = []; // If the segment loader prepares to load a segment, but does not have enough
21308 // information yet to start the loading process (e.g., if the audio loader wants to
21309 // load a segment from the next timeline but the main loader hasn't yet crossed that
21310 // timeline), then the load call will be added to the queue until it is ready to be
21311 // processed.
21312
21313 _this.loadQueue_ = [];
21314 _this.metadataQueue_ = {
21315 id3: [],
21316 caption: []
21317 };
21318 _this.waitingOnRemove_ = false;
21319 _this.quotaExceededErrorRetryTimeout_ = null; // Fragmented mp4 playback
21320
21321 _this.activeInitSegmentId_ = null;
21322 _this.initSegments_ = {}; // HLSe playback
21323
21324 _this.cacheEncryptionKeys_ = settings.cacheEncryptionKeys;
21325 _this.keyCache_ = {};
21326 _this.decrypter_ = settings.decrypter; // Manages the tracking and generation of sync-points, mappings
21327 // between a time in the display time and a segment index within
21328 // a playlist
21329
21330 _this.syncController_ = settings.syncController;
21331 _this.syncPoint_ = {
21332 segmentIndex: 0,
21333 time: 0
21334 };
21335 _this.transmuxer_ = _this.createTransmuxer_();
21336
21337 _this.triggerSyncInfoUpdate_ = function () {
21338 return _this.trigger('syncinfoupdate');
21339 };
21340
21341 _this.syncController_.on('syncinfoupdate', _this.triggerSyncInfoUpdate_);
21342
21343 _this.mediaSource_.addEventListener('sourceopen', function () {
21344 if (!_this.isEndOfStream_()) {
21345 _this.ended_ = false;
21346 }
21347 }); // ...for determining the fetch location
21348
21349
21350 _this.fetchAtBuffer_ = false;
21351 _this.logger_ = logger("SegmentLoader[" + _this.loaderType_ + "]");
21352 Object.defineProperty(assertThisInitialized(_this), 'state', {
21353 get: function get() {
21354 return this.state_;
21355 },
21356 set: function set(newState) {
21357 if (newState !== this.state_) {
21358 this.logger_(this.state_ + " -> " + newState);
21359 this.state_ = newState;
21360 this.trigger('statechange');
21361 }
21362 }
21363 });
21364
21365 _this.sourceUpdater_.on('ready', function () {
21366 if (_this.hasEnoughInfoToAppend_()) {
21367 _this.processCallQueue_();
21368 }
21369 }); // Only the main loader needs to listen for pending timeline changes, as the main
21370 // loader should wait for audio to be ready to change its timeline so that both main
21371 // and audio timelines change together. For more details, see the
21372 // shouldWaitForTimelineChange function.
21373
21374
21375 if (_this.loaderType_ === 'main') {
21376 _this.timelineChangeController_.on('pendingtimelinechange', function () {
21377 if (_this.hasEnoughInfoToAppend_()) {
21378 _this.processCallQueue_();
21379 }
21380 });
21381 } // The main loader only listens on pending timeline changes, but the audio loader,
21382 // since its loads follow main, needs to listen on timeline changes. For more details,
21383 // see the shouldWaitForTimelineChange function.
21384
21385
21386 if (_this.loaderType_ === 'audio') {
21387 _this.timelineChangeController_.on('timelinechange', function () {
21388 if (_this.hasEnoughInfoToLoad_()) {
21389 _this.processLoadQueue_();
21390 }
21391
21392 if (_this.hasEnoughInfoToAppend_()) {
21393 _this.processCallQueue_();
21394 }
21395 });
21396 }
21397
21398 return _this;
21399 }
21400
21401 var _proto = SegmentLoader.prototype;
21402
21403 _proto.createTransmuxer_ = function createTransmuxer_() {
21404 return segmentTransmuxer.createTransmuxer({
21405 remux: false,
21406 alignGopsAtEnd: this.safeAppend_,
21407 keepOriginalTimestamps: true,
21408 handlePartialData: this.handlePartialData_,
21409 parse708captions: this.parse708captions_
21410 });
21411 }
21412 /**
21413 * reset all of our media stats
21414 *
21415 * @private
21416 */
21417 ;
21418
21419 _proto.resetStats_ = function resetStats_() {
21420 this.mediaBytesTransferred = 0;
21421 this.mediaRequests = 0;
21422 this.mediaRequestsAborted = 0;
21423 this.mediaRequestsTimedout = 0;
21424 this.mediaRequestsErrored = 0;
21425 this.mediaTransferDuration = 0;
21426 this.mediaSecondsLoaded = 0;
21427 }
21428 /**
21429 * dispose of the SegmentLoader and reset to the default state
21430 */
21431 ;
21432
21433 _proto.dispose = function dispose() {
21434 this.trigger('dispose');
21435 this.state = 'DISPOSED';
21436 this.pause();
21437 this.abort_();
21438
21439 if (this.transmuxer_) {
21440 this.transmuxer_.terminate();
21441 }
21442
21443 this.resetStats_();
21444
21445 if (this.checkBufferTimeout_) {
21446 window__default['default'].clearTimeout(this.checkBufferTimeout_);
21447 }
21448
21449 if (this.syncController_ && this.triggerSyncInfoUpdate_) {
21450 this.syncController_.off('syncinfoupdate', this.triggerSyncInfoUpdate_);
21451 }
21452
21453 this.off();
21454 };
21455
21456 _proto.setAudio = function setAudio(enable) {
21457 this.audioDisabled_ = !enable;
21458
21459 if (enable) {
21460 this.appendInitSegment_.audio = true;
21461 } else {
21462 // remove current track audio if it gets disabled
21463 this.sourceUpdater_.removeAudio(0, this.duration_());
21464 }
21465 }
21466 /**
21467 * abort anything the SegmentLoader is currently doing
21468 * and reset to a default state
21469 */
21470 ;
21471
21472 _proto.abort = function abort() {
21473 if (this.state !== 'WAITING') {
21474 if (this.pendingSegment_) {
21475 this.pendingSegment_ = null;
21476 }
21477
21478 return;
21479 }
21480
21481 this.abort_(); // We aborted the requests we were waiting on, so reset the loader's state to READY
21482 // since we are no longer "waiting" on any requests. XHR callback is not always run
21483 // when the request is aborted. This will prevent the loader from being stuck in the
21484 // WAITING state indefinitely.
21485
21486 this.state = 'READY'; // don't wait for buffer check timeouts to begin fetching the
21487 // next segment
21488
21489 if (!this.paused()) {
21490 this.monitorBuffer_();
21491 }
21492 }
21493 /**
21494 * abort all pending xhr requests and null any pending segments
21495 *
21496 * @private
21497 */
21498 ;
21499
21500 _proto.abort_ = function abort_() {
21501 if (this.pendingSegment_ && this.pendingSegment_.abortRequests) {
21502 this.pendingSegment_.abortRequests();
21503 } // clear out the segment being processed
21504
21505
21506 this.pendingSegment_ = null;
21507 this.callQueue_ = [];
21508 this.loadQueue_ = [];
21509 this.metadataQueue_.id3 = [];
21510 this.metadataQueue_.caption = [];
21511 this.timelineChangeController_.clearPendingTimelineChange(this.loaderType_);
21512 this.waitingOnRemove_ = false;
21513 window__default['default'].clearTimeout(this.quotaExceededErrorRetryTimeout_);
21514 this.quotaExceededErrorRetryTimeout_ = null;
21515 };
21516
21517 _proto.checkForAbort_ = function checkForAbort_(requestId) {
21518 // If the state is APPENDING, then aborts will not modify the state, meaning the first
21519 // callback that happens should reset the state to READY so that loading can continue.
21520 if (this.state === 'APPENDING' && !this.pendingSegment_) {
21521 this.state = 'READY';
21522 return true;
21523 }
21524
21525 if (!this.pendingSegment_ || this.pendingSegment_.requestId !== requestId) {
21526 return true;
21527 }
21528
21529 return false;
21530 }
21531 /**
21532 * set an error on the segment loader and null out any pending segments
21533 *
21534 * @param {Error} error the error to set on the SegmentLoader
21535 * @return {Error} the error that was set or that is currently set
21536 */
21537 ;
21538
21539 _proto.error = function error(_error) {
21540 if (typeof _error !== 'undefined') {
21541 this.logger_('error occurred:', _error);
21542 this.error_ = _error;
21543 }
21544
21545 this.pendingSegment_ = null;
21546 return this.error_;
21547 };
21548
21549 _proto.endOfStream = function endOfStream() {
21550 this.ended_ = true;
21551
21552 if (this.transmuxer_) {
21553 // need to clear out any cached data to prepare for the new segment
21554 segmentTransmuxer.reset(this.transmuxer_);
21555 }
21556
21557 this.gopBuffer_.length = 0;
21558 this.pause();
21559 this.trigger('ended');
21560 }
21561 /**
21562 * Indicates which time ranges are buffered
21563 *
21564 * @return {TimeRange}
21565 * TimeRange object representing the current buffered ranges
21566 */
21567 ;
21568
21569 _proto.buffered_ = function buffered_() {
21570 if (!this.sourceUpdater_ || !this.startingMediaInfo_) {
21571 return videojs__default['default'].createTimeRanges();
21572 }
21573
21574 if (this.loaderType_ === 'main') {
21575 var _this$startingMediaIn = this.startingMediaInfo_,
21576 hasAudio = _this$startingMediaIn.hasAudio,
21577 hasVideo = _this$startingMediaIn.hasVideo,
21578 isMuxed = _this$startingMediaIn.isMuxed;
21579
21580 if (hasVideo && hasAudio && !this.audioDisabled_ && !isMuxed) {
21581 return this.sourceUpdater_.buffered();
21582 }
21583
21584 if (hasVideo) {
21585 return this.sourceUpdater_.videoBuffered();
21586 }
21587 } // One case that can be ignored for now is audio only with alt audio,
21588 // as we don't yet have proper support for that.
21589
21590
21591 return this.sourceUpdater_.audioBuffered();
21592 }
21593 /**
21594 * Gets and sets init segment for the provided map
21595 *
21596 * @param {Object} map
21597 * The map object representing the init segment to get or set
21598 * @param {boolean=} set
21599 * If true, the init segment for the provided map should be saved
21600 * @return {Object}
21601 * map object for desired init segment
21602 */
21603 ;
21604
21605 _proto.initSegmentForMap = function initSegmentForMap(map, set) {
21606 if (set === void 0) {
21607 set = false;
21608 }
21609
21610 if (!map) {
21611 return null;
21612 }
21613
21614 var id = initSegmentId(map);
21615 var storedMap = this.initSegments_[id];
21616
21617 if (set && !storedMap && map.bytes) {
21618 this.initSegments_[id] = storedMap = {
21619 resolvedUri: map.resolvedUri,
21620 byterange: map.byterange,
21621 bytes: map.bytes,
21622 tracks: map.tracks,
21623 timescales: map.timescales
21624 };
21625 }
21626
21627 return storedMap || map;
21628 }
21629 /**
21630 * Gets and sets key for the provided key
21631 *
21632 * @param {Object} key
21633 * The key object representing the key to get or set
21634 * @param {boolean=} set
21635 * If true, the key for the provided key should be saved
21636 * @return {Object}
21637 * Key object for desired key
21638 */
21639 ;
21640
21641 _proto.segmentKey = function segmentKey(key, set) {
21642 if (set === void 0) {
21643 set = false;
21644 }
21645
21646 if (!key) {
21647 return null;
21648 }
21649
21650 var id = segmentKeyId(key);
21651 var storedKey = this.keyCache_[id]; // TODO: We should use the HTTP Expires header to invalidate our cache per
21652 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-6.2.3
21653
21654 if (this.cacheEncryptionKeys_ && set && !storedKey && key.bytes) {
21655 this.keyCache_[id] = storedKey = {
21656 resolvedUri: key.resolvedUri,
21657 bytes: key.bytes
21658 };
21659 }
21660
21661 var result = {
21662 resolvedUri: (storedKey || key).resolvedUri
21663 };
21664
21665 if (storedKey) {
21666 result.bytes = storedKey.bytes;
21667 }
21668
21669 return result;
21670 }
21671 /**
21672 * Returns true if all configuration required for loading is present, otherwise false.
21673 *
21674 * @return {boolean} True if the all configuration is ready for loading
21675 * @private
21676 */
21677 ;
21678
21679 _proto.couldBeginLoading_ = function couldBeginLoading_() {
21680 return this.playlist_ && !this.paused();
21681 }
21682 /**
21683 * load a playlist and start to fill the buffer
21684 */
21685 ;
21686
21687 _proto.load = function load() {
21688 // un-pause
21689 this.monitorBuffer_(); // if we don't have a playlist yet, keep waiting for one to be
21690 // specified
21691
21692 if (!this.playlist_) {
21693 return;
21694 } // if all the configuration is ready, initialize and begin loading
21695
21696
21697 if (this.state === 'INIT' && this.couldBeginLoading_()) {
21698 return this.init_();
21699 } // if we're in the middle of processing a segment already, don't
21700 // kick off an additional segment request
21701
21702
21703 if (!this.couldBeginLoading_() || this.state !== 'READY' && this.state !== 'INIT') {
21704 return;
21705 }
21706
21707 this.state = 'READY';
21708 }
21709 /**
21710 * Once all the starting parameters have been specified, begin
21711 * operation. This method should only be invoked from the INIT
21712 * state.
21713 *
21714 * @private
21715 */
21716 ;
21717
21718 _proto.init_ = function init_() {
21719 this.state = 'READY'; // if this is the audio segment loader, and it hasn't been inited before, then any old
21720 // audio data from the muxed content should be removed
21721
21722 this.resetEverything();
21723 return this.monitorBuffer_();
21724 }
21725 /**
21726 * set a playlist on the segment loader
21727 *
21728 * @param {PlaylistLoader} media the playlist to set on the segment loader
21729 */
21730 ;
21731
21732 _proto.playlist = function playlist(newPlaylist, options) {
21733 if (options === void 0) {
21734 options = {};
21735 }
21736
21737 if (!newPlaylist) {
21738 return;
21739 }
21740
21741 var oldPlaylist = this.playlist_;
21742 var segmentInfo = this.pendingSegment_;
21743 this.playlist_ = newPlaylist;
21744 this.xhrOptions_ = options; // when we haven't started playing yet, the start of a live playlist
21745 // is always our zero-time so force a sync update each time the playlist
21746 // is refreshed from the server
21747 //
21748 // Use the INIT state to determine if playback has started, as the playlist sync info
21749 // should be fixed once requests begin (as sync points are generated based on sync
21750 // info), but not before then.
21751
21752 if (this.state === 'INIT') {
21753 newPlaylist.syncInfo = {
21754 mediaSequence: newPlaylist.mediaSequence,
21755 time: 0
21756 }; // Setting the date time mapping means mapping the program date time (if available)
21757 // to time 0 on the player's timeline. The playlist's syncInfo serves a similar
21758 // purpose, mapping the initial mediaSequence to time zero. Since the syncInfo can
21759 // be updated as the playlist is refreshed before the loader starts loading, the
21760 // program date time mapping needs to be updated as well.
21761 //
21762 // This mapping is only done for the main loader because a program date time should
21763 // map equivalently between playlists.
21764
21765 if (this.loaderType_ === 'main') {
21766 this.syncController_.setDateTimeMappingForStart(newPlaylist);
21767 }
21768 }
21769
21770 var oldId = null;
21771
21772 if (oldPlaylist) {
21773 if (oldPlaylist.id) {
21774 oldId = oldPlaylist.id;
21775 } else if (oldPlaylist.uri) {
21776 oldId = oldPlaylist.uri;
21777 }
21778 }
21779
21780 this.logger_("playlist update [" + oldId + " => " + (newPlaylist.id || newPlaylist.uri) + "]"); // in VOD, this is always a rendition switch (or we updated our syncInfo above)
21781 // in LIVE, we always want to update with new playlists (including refreshes)
21782
21783 this.trigger('syncinfoupdate'); // if we were unpaused but waiting for a playlist, start
21784 // buffering now
21785
21786 if (this.state === 'INIT' && this.couldBeginLoading_()) {
21787 return this.init_();
21788 }
21789
21790 if (!oldPlaylist || oldPlaylist.uri !== newPlaylist.uri) {
21791 if (this.mediaIndex !== null || this.handlePartialData_) {
21792 // we must "resync" the segment loader when we switch renditions and
21793 // the segment loader is already synced to the previous rendition
21794 //
21795 // or if we're handling partial data, we need to ensure the transmuxer is cleared
21796 // out before we start adding more data
21797 this.resyncLoader();
21798 }
21799
21800 this.currentMediaInfo_ = void 0;
21801 this.trigger('playlistupdate'); // the rest of this function depends on `oldPlaylist` being defined
21802
21803 return;
21804 } // we reloaded the same playlist so we are in a live scenario
21805 // and we will likely need to adjust the mediaIndex
21806
21807
21808 var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;
21809 this.logger_("live window shift [" + mediaSequenceDiff + "]"); // update the mediaIndex on the SegmentLoader
21810 // this is important because we can abort a request and this value must be
21811 // equal to the last appended mediaIndex
21812
21813 if (this.mediaIndex !== null) {
21814 this.mediaIndex -= mediaSequenceDiff; // this can happen if we are going to load the first segment, but get a playlist
21815 // update during that. mediaIndex would go from 0 to -1 if mediaSequence in the
21816 // new playlist was incremented by 1.
21817
21818 if (this.mediaIndex < 0) {
21819 this.mediaIndex = null;
21820 this.partIndex = null;
21821 } else {
21822 var segment = this.playlist_.segments[this.mediaIndex]; // partIndex should remain the same for the same segment
21823 // unless parts fell off of the playlist for this segment.
21824 // In that case we need to reset partIndex and resync
21825
21826 if (this.partIndex && (!segment.parts || !segment.parts.length || !segment.parts[this.partIndex])) {
21827 var mediaIndex = this.mediaIndex;
21828 this.logger_("currently processing part (index " + this.partIndex + ") no longer exists.");
21829 this.resetLoader(); // We want to throw away the partIndex and the data associated with it,
21830 // as the part was dropped from our current playlists segment.
21831 // The mediaIndex will still be valid so keep that around.
21832
21833 this.mediaIndex = mediaIndex;
21834 }
21835 }
21836 } // update the mediaIndex on the SegmentInfo object
21837 // this is important because we will update this.mediaIndex with this value
21838 // in `handleAppendsDone_` after the segment has been successfully appended
21839
21840
21841 if (segmentInfo) {
21842 segmentInfo.mediaIndex -= mediaSequenceDiff;
21843
21844 if (segmentInfo.mediaIndex < 0) {
21845 segmentInfo.mediaIndex = null;
21846 segmentInfo.partIndex = null;
21847 } else {
21848 // we need to update the referenced segment so that timing information is
21849 // saved for the new playlist's segment, however, if the segment fell off the
21850 // playlist, we can leave the old reference and just lose the timing info
21851 if (segmentInfo.mediaIndex >= 0) {
21852 segmentInfo.segment = newPlaylist.segments[segmentInfo.mediaIndex];
21853 }
21854
21855 if (segmentInfo.partIndex >= 0 && segmentInfo.segment.parts) {
21856 segmentInfo.part = segmentInfo.segment.parts[segmentInfo.partIndex];
21857 }
21858 }
21859 }
21860
21861 this.syncController_.saveExpiredSegmentInfo(oldPlaylist, newPlaylist);
21862 }
21863 /**
21864 * Prevent the loader from fetching additional segments. If there
21865 * is a segment request outstanding, it will finish processing
21866 * before the loader halts. A segment loader can be unpaused by
21867 * calling load().
21868 */
21869 ;
21870
21871 _proto.pause = function pause() {
21872 if (this.checkBufferTimeout_) {
21873 window__default['default'].clearTimeout(this.checkBufferTimeout_);
21874 this.checkBufferTimeout_ = null;
21875 }
21876 }
21877 /**
21878 * Returns whether the segment loader is fetching additional
21879 * segments when given the opportunity. This property can be
21880 * modified through calls to pause() and load().
21881 */
21882 ;
21883
21884 _proto.paused = function paused() {
21885 return this.checkBufferTimeout_ === null;
21886 }
21887 /**
21888 * Delete all the buffered data and reset the SegmentLoader
21889 *
21890 * @param {Function} [done] an optional callback to be executed when the remove
21891 * operation is complete
21892 */
21893 ;
21894
21895 _proto.resetEverything = function resetEverything(done) {
21896 this.ended_ = false;
21897 this.appendInitSegment_ = {
21898 audio: true,
21899 video: true
21900 };
21901 this.resetLoader(); // remove from 0, the earliest point, to Infinity, to signify removal of everything.
21902 // VTT Segment Loader doesn't need to do anything but in the regular SegmentLoader,
21903 // we then clamp the value to duration if necessary.
21904
21905 this.remove(0, Infinity, done); // clears fmp4 captions
21906
21907 if (this.transmuxer_) {
21908 this.transmuxer_.postMessage({
21909 action: 'clearAllMp4Captions'
21910 });
21911 }
21912 }
21913 /**
21914 * Force the SegmentLoader to resync and start loading around the currentTime instead
21915 * of starting at the end of the buffer
21916 *
21917 * Useful for fast quality changes
21918 */
21919 ;
21920
21921 _proto.resetLoader = function resetLoader() {
21922 this.fetchAtBuffer_ = false;
21923 this.resyncLoader();
21924 }
21925 /**
21926 * Force the SegmentLoader to restart synchronization and make a conservative guess
21927 * before returning to the simple walk-forward method
21928 */
21929 ;
21930
21931 _proto.resyncLoader = function resyncLoader() {
21932 if (this.transmuxer_) {
21933 // need to clear out any cached data to prepare for the new segment
21934 segmentTransmuxer.reset(this.transmuxer_);
21935 }
21936
21937 this.mediaIndex = null;
21938 this.partIndex = null;
21939 this.syncPoint_ = null;
21940 this.isPendingTimestampOffset_ = false;
21941 this.callQueue_ = [];
21942 this.loadQueue_ = [];
21943 this.metadataQueue_.id3 = [];
21944 this.metadataQueue_.caption = [];
21945 this.abort();
21946
21947 if (this.transmuxer_) {
21948 this.transmuxer_.postMessage({
21949 action: 'clearParsedMp4Captions'
21950 });
21951 }
21952 }
21953 /**
21954 * Remove any data in the source buffer between start and end times
21955 *
21956 * @param {number} start - the start time of the region to remove from the buffer
21957 * @param {number} end - the end time of the region to remove from the buffer
21958 * @param {Function} [done] - an optional callback to be executed when the remove
21959 * operation is complete
21960 * @param {boolean} force - force all remove operations to happen
21961 */
21962 ;
21963
21964 _proto.remove = function remove(start, end, done, force) {
21965 if (done === void 0) {
21966 done = function done() {};
21967 }
21968
21969 if (force === void 0) {
21970 force = false;
21971 }
21972
21973 // clamp end to duration if we need to remove everything.
21974 // This is due to a browser bug that causes issues if we remove to Infinity.
21975 // videojs/videojs-contrib-hls#1225
21976 if (end === Infinity) {
21977 end = this.duration_();
21978 }
21979
21980 if (!this.sourceUpdater_ || !this.startingMediaInfo_) {
21981 this.logger_('skipping remove because no source updater or starting media info'); // nothing to remove if we haven't processed any media
21982
21983 return;
21984 } // set it to one to complete this function's removes
21985
21986
21987 var removesRemaining = 1;
21988
21989 var removeFinished = function removeFinished() {
21990 removesRemaining--;
21991
21992 if (removesRemaining === 0) {
21993 done();
21994 }
21995 };
21996
21997 if (force || !this.audioDisabled_) {
21998 removesRemaining++;
21999 this.sourceUpdater_.removeAudio(start, end, removeFinished);
22000 } // While it would be better to only remove video if the main loader has video, this
22001 // should be safe with audio only as removeVideo will call back even if there's no
22002 // video buffer.
22003 //
22004 // In theory we can check to see if there's video before calling the remove, but in
22005 // the event that we're switching between renditions and from video to audio only
22006 // (when we add support for that), we may need to clear the video contents despite
22007 // what the new media will contain.
22008
22009
22010 if (force || this.loaderType_ === 'main') {
22011 this.gopBuffer_ = removeGopBuffer(this.gopBuffer_, start, end, this.timeMapping_);
22012 removesRemaining++;
22013 this.sourceUpdater_.removeVideo(start, end, removeFinished);
22014 } // remove any captions and ID3 tags
22015
22016
22017 for (var track in this.inbandTextTracks_) {
22018 removeCuesFromTrack(start, end, this.inbandTextTracks_[track]);
22019 }
22020
22021 removeCuesFromTrack(start, end, this.segmentMetadataTrack_); // finished this function's removes
22022
22023 removeFinished();
22024 }
22025 /**
22026 * (re-)schedule monitorBufferTick_ to run as soon as possible
22027 *
22028 * @private
22029 */
22030 ;
22031
22032 _proto.monitorBuffer_ = function monitorBuffer_() {
22033 if (this.checkBufferTimeout_) {
22034 window__default['default'].clearTimeout(this.checkBufferTimeout_);
22035 }
22036
22037 this.checkBufferTimeout_ = window__default['default'].setTimeout(this.monitorBufferTick_.bind(this), 1);
22038 }
22039 /**
22040 * As long as the SegmentLoader is in the READY state, periodically
22041 * invoke fillBuffer_().
22042 *
22043 * @private
22044 */
22045 ;
22046
22047 _proto.monitorBufferTick_ = function monitorBufferTick_() {
22048 if (this.state === 'READY') {
22049 this.fillBuffer_();
22050 }
22051
22052 if (this.checkBufferTimeout_) {
22053 window__default['default'].clearTimeout(this.checkBufferTimeout_);
22054 }
22055
22056 this.checkBufferTimeout_ = window__default['default'].setTimeout(this.monitorBufferTick_.bind(this), CHECK_BUFFER_DELAY);
22057 }
22058 /**
22059 * fill the buffer with segments unless the sourceBuffers are
22060 * currently updating
22061 *
22062 * Note: this function should only ever be called by monitorBuffer_
22063 * and never directly
22064 *
22065 * @private
22066 */
22067 ;
22068
22069 _proto.fillBuffer_ = function fillBuffer_() {
22070 // TODO since the source buffer maintains a queue, and we shouldn't call this function
22071 // except when we're ready for the next segment, this check can most likely be removed
22072 if (this.sourceUpdater_.updating()) {
22073 return;
22074 }
22075
22076 if (!this.syncPoint_) {
22077 this.syncPoint_ = this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_());
22078 }
22079
22080 var buffered = this.buffered_(); // see if we need to begin loading immediately
22081
22082 var segmentInfo = this.checkBuffer_(buffered, this.playlist_, this.mediaIndex, this.hasPlayed_(), this.currentTime_(), this.syncPoint_, this.partIndex);
22083
22084 if (!segmentInfo) {
22085 return;
22086 }
22087
22088 segmentInfo.timestampOffset = timestampOffsetForSegment({
22089 segmentTimeline: segmentInfo.timeline,
22090 currentTimeline: this.currentTimeline_,
22091 startOfSegment: segmentInfo.startOfSegment,
22092 buffered: buffered,
22093 overrideCheck: this.isPendingTimestampOffset_
22094 });
22095 this.isPendingTimestampOffset_ = false;
22096
22097 if (typeof segmentInfo.timestampOffset === 'number') {
22098 this.timelineChangeController_.pendingTimelineChange({
22099 type: this.loaderType_,
22100 from: this.currentTimeline_,
22101 to: segmentInfo.timeline
22102 });
22103 }
22104
22105 this.loadSegment_(segmentInfo);
22106 }
22107 /**
22108 * Determines if we should call endOfStream on the media source based
22109 * on the state of the buffer or if the appended segment was the final
22110 * segment in the playlist.
22111 *
22112 * @param {number} [mediaIndex] the media index of segment we last appended
22113 * @param {Object} [playlist] a media playlist object
22114 * @return {boolean} do we need to call endOfStream on the MediaSource
22115 */
22116 ;
22117
22118 _proto.isEndOfStream_ = function isEndOfStream_(mediaIndex, playlist, partIndex) {
22119 if (mediaIndex === void 0) {
22120 mediaIndex = this.mediaIndex;
22121 }
22122
22123 if (playlist === void 0) {
22124 playlist = this.playlist_;
22125 }
22126
22127 if (partIndex === void 0) {
22128 partIndex = this.partIndex;
22129 }
22130
22131 if (!playlist || !this.mediaSource_) {
22132 return false;
22133 }
22134
22135 var segment = typeof mediaIndex === 'number' && playlist.segments[mediaIndex]; // mediaIndex is zero-based but length is one-based
22136
22137 var appendedLastSegment = mediaIndex + 1 === playlist.segments.length; // true if there are no parts, or this is the last part.
22138
22139 var appendedLastPart = !segment || !segment.parts || partIndex + 1 === segment.parts.length; // if we've buffered to the end of the video, we need to call endOfStream
22140 // so that MediaSources can trigger the `ended` event when it runs out of
22141 // buffered data instead of waiting for more data
22142
22143 return playlist.endList && this.mediaSource_.readyState === 'open' && appendedLastSegment && appendedLastPart;
22144 }
22145 /**
22146 * Determines what segment request should be made, given current playback
22147 * state.
22148 *
22149 * @param {TimeRanges} buffered - the state of the buffer
22150 * @param {Object} playlist - the playlist object to fetch segments from
22151 * @param {number} mediaIndex - the previous mediaIndex fetched or null
22152 * @param {boolean} hasPlayed - a flag indicating whether we have played or not
22153 * @param {number} currentTime - the playback position in seconds
22154 * @param {Object} syncPoint - a segment info object that describes the current sync point
22155 * @return {Object} a segment request object that describes the segment to load
22156 */
22157 ;
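  // Sketch of the end-of-playlist test performed by isEndOfStream_ above:
  // mediaIndex is zero-based while length is one-based, and a segment without
  // low-latency parts counts as having appended its last part. Illustrative
  // only; the names are not part of this bundle.
  function exampleAppendedLast(playlist, mediaIndex, partIndex) {
    var segment = typeof mediaIndex === 'number' && playlist.segments[mediaIndex];
    var appendedLastSegment = mediaIndex + 1 === playlist.segments.length;
    var appendedLastPart = !segment || !segment.parts || partIndex + 1 === segment.parts.length;
    return appendedLastSegment && appendedLastPart;
  }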
22158
22159 _proto.checkBuffer_ = function checkBuffer_(buffered, playlist, currentMediaIndex, hasPlayed, currentTime, syncPoint, currentPartIndex) {
22160 var lastBufferedEnd = 0;
22161
22162 if (buffered.length) {
22163 lastBufferedEnd = buffered.end(buffered.length - 1);
22164 }
22165
22166 var bufferedTime = Math.max(0, lastBufferedEnd - currentTime);
22167
22168 if (!playlist.segments.length) {
22169 return null;
22170 } // if there is plenty of content buffered, and the video has
22171 // been played before, relax for a while
22172
22173
22174 if (bufferedTime >= this.goalBufferLength_()) {
22175 return null;
22176 } // if the video has not yet played once, and we already have
22177 // one segment downloaded, do nothing
22178
22179
22180 if (!hasPlayed && bufferedTime >= 1) {
22181 return null;
22182 }
22183
22184 var nextPartIndex = null;
22185 var nextMediaIndex = null;
22186 var startOfSegment;
22187 var isSyncRequest = false; // When the syncPoint is null, there is no way of determining a good
22188 // conservative segment index to fetch from
22189 // The best thing to do here is to get sync-point data by
22190 // making a request
22191
22192 if (syncPoint === null) {
22193 nextMediaIndex = this.getSyncSegmentCandidate_(playlist);
22194 isSyncRequest = true;
22195 } else if (currentMediaIndex !== null) {
22196 // Under normal playback conditions fetching is a simple walk forward
22197 var segment = playlist.segments[currentMediaIndex];
22198 var partIndex = typeof currentPartIndex === 'number' ? currentPartIndex : -1;
22199 startOfSegment = segment.end ? segment.end : lastBufferedEnd;
22200
22201 if (segment.parts && segment.parts[partIndex + 1]) {
22202 nextMediaIndex = currentMediaIndex;
22203 nextPartIndex = partIndex + 1;
22204 } else {
22205 nextMediaIndex = currentMediaIndex + 1;
22206 } // There is a sync-point but the lack of a mediaIndex indicates that
22207 // we need to make a good conservative guess about which segment to
22208 // fetch
22209
22210 } else {
22211 // Find the segment containing the end of the buffer or current time.
22212 var mediaSourceInfo = Playlist.getMediaInfoForTime(playlist, this.fetchAtBuffer_ ? lastBufferedEnd : currentTime, syncPoint.segmentIndex, syncPoint.time);
22213 nextMediaIndex = mediaSourceInfo.mediaIndex;
22214 startOfSegment = mediaSourceInfo.startTime;
22215 nextPartIndex = mediaSourceInfo.partIndex;
22216 }
22217
22218 if (typeof nextPartIndex !== 'number' && playlist.segments[nextMediaIndex] && playlist.segments[nextMediaIndex].parts) {
22219 nextPartIndex = 0;
22220 }
22221
22222 var segmentInfo = this.generateSegmentInfo_(playlist, nextMediaIndex, startOfSegment, isSyncRequest, nextPartIndex);
22223
22224 if (!segmentInfo) {
22225 return;
22226 } // if this is the last segment in the playlist,
22227 // we are not seeking, and end of stream has already been called,
22228 // do not re-request
22229
22230
22231 if (this.mediaSource_ && this.playlist_ && segmentInfo.mediaIndex === this.playlist_.segments.length - 1 && this.mediaSource_.readyState === 'ended' && !this.seeking_()) {
22232 return;
22233 }
22234
22235 this.logger_("checkBuffer_ returning " + segmentInfo.uri, {
22236 segmentInfo: segmentInfo,
22237 playlist: playlist,
22238 currentMediaIndex: currentMediaIndex,
22239 currentPartIndex: currentPartIndex,
22240 nextPartIndex: nextPartIndex,
22241 nextMediaIndex: nextMediaIndex,
22242 startOfSegment: startOfSegment,
22243 isSyncRequest: isSyncRequest
22244 });
22245 return segmentInfo;
22246 }
22247 /**
22248 * The segment loader has no recourse except to fetch a segment in the
22249 * current playlist and use the internal timestamps in that segment to
22250 * generate a syncPoint. This function returns a good candidate index
22251 * for that process.
22252 *
22253 * @param {Object} playlist - the playlist object to look for a sync segment candidate in
22254 * @return {number} An index of a segment from the playlist to load
22255 */
22256 ;
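  // Sketch of the walk-forward step in checkBuffer_ above for playlists with
  // low-latency parts: advance within the current segment's parts first, and
  // only move to the next segment once the parts are exhausted. Illustrative
  // only.
  function exampleNextIndices(playlist, currentMediaIndex, currentPartIndex) {
    var segment = playlist.segments[currentMediaIndex];
    var partIndex = typeof currentPartIndex === 'number' ? currentPartIndex : -1;

    if (segment.parts && segment.parts[partIndex + 1]) {
      // more parts remain in this segment
      return { mediaIndex: currentMediaIndex, partIndex: partIndex + 1 };
    }

    // move on to the next full segment
    return { mediaIndex: currentMediaIndex + 1, partIndex: null };
  }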
22257
22258 _proto.getSyncSegmentCandidate_ = function getSyncSegmentCandidate_(playlist) {
22259 var _this2 = this;
22260
22261 if (this.currentTimeline_ === -1) {
22262 return 0;
22263 }
22264
22265 var segmentIndexArray = playlist.segments.map(function (s, i) {
22266 return {
22267 timeline: s.timeline,
22268 segmentIndex: i
22269 };
22270 }).filter(function (s) {
22271 return s.timeline === _this2.currentTimeline_;
22272 });
22273
22274 if (segmentIndexArray.length) {
22275 return segmentIndexArray[Math.min(segmentIndexArray.length - 1, 1)].segmentIndex;
22276 }
22277
22278 return Math.max(playlist.segments.length - 1, 0);
22279 };
22280
22281 _proto.generateSegmentInfo_ = function generateSegmentInfo_(playlist, mediaIndex, startOfSegment, isSyncRequest, partIndex) {
22282 if (mediaIndex < 0 || mediaIndex >= playlist.segments.length) {
22283 return null;
22284 }
22285
22286 var segment = playlist.segments[mediaIndex];
22287
22288 if (segment.parts && segment.parts.length && partIndex >= segment.parts.length) {
22289 return null;
22290 }
22291
22292 var part = segment.parts && segment.parts.length && segment.parts[partIndex];
22293 var audioBuffered = this.sourceUpdater_.audioBuffered();
22294 var videoBuffered = this.sourceUpdater_.videoBuffered();
22295 var audioAppendStart;
22296 var gopsToAlignWith;
22297
22298 if (audioBuffered.length) {
22299 // since the transmuxer is using the actual timing values, but the buffer is
22300 // adjusted by the timestamp offset, we must adjust the value here
22301 audioAppendStart = audioBuffered.end(audioBuffered.length - 1) - this.sourceUpdater_.audioTimestampOffset();
22302 }
22303
22304 if (videoBuffered.length) {
22305 gopsToAlignWith = gopsSafeToAlignWith(this.gopBuffer_, // since the transmuxer is using the actual timing values, but the time is
22306 // adjusted by the timestamp offset, we must adjust the value here
22307 this.currentTime_() - this.sourceUpdater_.videoTimestampOffset(), this.timeMapping_);
22308 }
22309
22310 return {
22311 requestId: 'segment-loader-' + Math.random(),
22312 // resolve the segment URL relative to the playlist
22313 uri: part && part.resolvedUri || segment.resolvedUri,
22314 // the segment's mediaIndex at the time it was requested
22315 mediaIndex: mediaIndex,
22316 partIndex: part ? partIndex : null,
22317 // whether or not to update the SegmentLoader's state with this
22318 // segment's mediaIndex
22319 isSyncRequest: isSyncRequest,
22320 startOfSegment: startOfSegment,
22321 // the segment's playlist
22322 playlist: playlist,
22323 // unencrypted bytes of the segment
22324 bytes: null,
22325 // when a key is defined for this segment, the encrypted bytes
22326 encryptedBytes: null,
22327 // The target timestampOffset for this segment when we append it
22328 // to the source buffer
22329 timestampOffset: null,
22330 // The timeline that the segment is in
22331 timeline: segment.timeline,
22332 // The expected duration of the segment in seconds
22333 duration: segment.duration,
22334 // retain the segment in case the playlist updates while doing an async process
22335 segment: segment,
22336 part: part,
22337 byteLength: 0,
22338 transmuxer: this.transmuxer_,
22339 audioAppendStart: audioAppendStart,
22340 gopsToAlignWith: gopsToAlignWith
22341 };
22342 }
22343 /**
22344 * Determines if the network has enough bandwidth to complete the current segment
22345 * request in a timely manner. If not, the request will be aborted early and bandwidth
22346 * updated to trigger a playlist switch.
22347 *
22348 * @param {Object} stats
22349 * Object containing stats about the request timing and size
22350 * @private
22351 */
22352 ;
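  // Sketch of the offset adjustment generateSegmentInfo_ makes above: the
  // buffer reports times shifted by the timestamp offset, while the
  // transmuxer works in unshifted media time, so the offset is subtracted
  // before handing the append start to the transmuxer. Illustrative only.
  function exampleAudioAppendStart(audioBuffered, audioTimestampOffset) {
    if (!audioBuffered.length) {
      return undefined;
    }

    return audioBuffered.end(audioBuffered.length - 1) - audioTimestampOffset;
  }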
22353
22354 _proto.earlyAbortWhenNeeded_ = function earlyAbortWhenNeeded_(stats) {
22355 if (this.vhs_.tech_.paused() || // Don't abort if the current playlist is on the lowestEnabledRendition
22356 // TODO: Replace using timeout with a boolean indicating whether this playlist is
22357 // the lowestEnabledRendition.
22358 !this.xhrOptions_.timeout || // Don't abort if we have no bandwidth information to estimate segment sizes
22359 !this.playlist_.attributes.BANDWIDTH) {
22360 return;
22361 } // Wait at least 1 second since the first byte of data has been received before
22362 // using the calculated bandwidth from the progress event to allow the bitrate
22363 // to stabilize
22364
22365
22366 if (Date.now() - (stats.firstBytesReceivedAt || Date.now()) < 1000) {
22367 return;
22368 }
22369
22370 var currentTime = this.currentTime_();
22371 var measuredBandwidth = stats.bandwidth;
22372 var segmentDuration = this.pendingSegment_.duration;
22373 var requestTimeRemaining = Playlist.estimateSegmentRequestTime(segmentDuration, measuredBandwidth, this.playlist_, stats.bytesReceived); // Subtract 1 from the timeUntilRebuffer so we still consider an early abort
22374 // if we are only left with less than 1 second when the request completes.
22375 // A negative timeUntilRebuffer indicates we are already rebuffering
22376
22377 var timeUntilRebuffer$1 = timeUntilRebuffer(this.buffered_(), currentTime, this.vhs_.tech_.playbackRate()) - 1; // Only consider aborting early if the estimated time to finish the download
22378 // is larger than the estimated time until the player runs out of forward buffer
22379
22380 if (requestTimeRemaining <= timeUntilRebuffer$1) {
22381 return;
22382 }
22383
22384 var switchCandidate = minRebufferMaxBandwidthSelector({
22385 master: this.vhs_.playlists.master,
22386 currentTime: currentTime,
22387 bandwidth: measuredBandwidth,
22388 duration: this.duration_(),
22389 segmentDuration: segmentDuration,
22390 timeUntilRebuffer: timeUntilRebuffer$1,
22391 currentTimeline: this.currentTimeline_,
22392 syncController: this.syncController_
22393 });
22394
22395 if (!switchCandidate) {
22396 return;
22397 }
22398
22399 var rebufferingImpact = requestTimeRemaining - timeUntilRebuffer$1;
22400 var timeSavedBySwitching = rebufferingImpact - switchCandidate.rebufferingImpact;
22401 var minimumTimeSaving = 0.5; // If we are already rebuffering, increase the amount of variance we add to the
22402 // potential round trip time of the new request so that we are not too aggressive
22403 // with switching to a playlist that might save us a fraction of a second.
22404
22405 if (timeUntilRebuffer$1 <= TIME_FUDGE_FACTOR) {
22406 minimumTimeSaving = 1;
22407 }
22408
22409 if (!switchCandidate.playlist || switchCandidate.playlist.uri === this.playlist_.uri || timeSavedBySwitching < minimumTimeSaving) {
22410 return;
22411 } // set the bandwidth to that of the desired playlist being sure to scale by
22412 // BANDWIDTH_VARIANCE and add one so the playlist selector does not exclude it
22413 // don't trigger a bandwidthupdate as the bandwidth is artificial
22414
22415
22416 this.bandwidth = switchCandidate.playlist.attributes.BANDWIDTH * Config.BANDWIDTH_VARIANCE + 1;
22417 this.trigger('earlyabort');
22418 };
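  // Sketch of the decision math in earlyAbortWhenNeeded_ above. The fudge
  // factor is approximated here as 0 rather than the bundle's
  // TIME_FUDGE_FACTOR constant; everything else mirrors the logic above, and
  // the names are illustrative.
  function exampleShouldAbortEarly(requestTimeRemaining, timeUntilRebuffer, candidate, currentPlaylistUri) {
    // no point aborting if the download will beat the rebuffer deadline
    if (requestTimeRemaining <= timeUntilRebuffer) {
      return false;
    }

    var rebufferingImpact = requestTimeRemaining - timeUntilRebuffer;
    var timeSavedBySwitching = rebufferingImpact - candidate.rebufferingImpact;
    // demand a bigger saving when we are already at (or past) the deadline
    var minimumTimeSaving = timeUntilRebuffer <= 0 ? 1 : 0.5;
    return Boolean(candidate.playlist) && candidate.playlist.uri !== currentPlaylistUri && timeSavedBySwitching >= minimumTimeSaving;
  }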
22419
22420 _proto.handleAbort_ = function handleAbort_(segmentInfo) {
22421 this.logger_("Aborting " + segmentInfoString(segmentInfo));
22422 this.mediaRequestsAborted += 1;
22423 }
22424 /**
22425 * XHR `progress` event handler
22426 *
22427 * @param {Event} event
22428 * The XHR `progress` event
22429 * @param {Object} simpleSegment
22430 * A simplified segment object copy
22431 * @private
22432 */
22433 ;
22434
22435 _proto.handleProgress_ = function handleProgress_(event, simpleSegment) {
22436 this.earlyAbortWhenNeeded_(simpleSegment.stats);
22437
22438 if (this.checkForAbort_(simpleSegment.requestId)) {
22439 return;
22440 }
22441
22442 this.trigger('progress');
22443 };
22444
22445 _proto.handleTrackInfo_ = function handleTrackInfo_(simpleSegment, trackInfo) {
22446 this.earlyAbortWhenNeeded_(simpleSegment.stats);
22447
22448 if (this.checkForAbort_(simpleSegment.requestId)) {
22449 return;
22450 }
22451
22452 if (this.checkForIllegalMediaSwitch(trackInfo)) {
22453 return;
22454 }
22455
22456 trackInfo = trackInfo || {}; // When we have track info, determine what media types this loader is dealing with.
22457 // Guard against cases where we're not getting track info at all until we are
22458 // certain that all streams will provide it.
22459
22460 if (!shallowEqual(this.currentMediaInfo_, trackInfo)) {
22461 this.appendInitSegment_ = {
22462 audio: true,
22463 video: true
22464 };
22465 this.startingMediaInfo_ = trackInfo;
22466 this.currentMediaInfo_ = trackInfo;
22467 this.logger_('trackinfo update', trackInfo);
22468 this.trigger('trackinfo');
22469 } // trackinfo may cause an abort if the trackinfo
22470 // causes a codec change to an unsupported codec.
22471
22472
22473 if (this.checkForAbort_(simpleSegment.requestId)) {
22474 return;
22475 } // set trackinfo on the pending segment so that
22476 // it can append.
22477
22478
22479 this.pendingSegment_.trackInfo = trackInfo; // check if any calls were waiting on the track info
22480
22481 if (this.hasEnoughInfoToAppend_()) {
22482 this.processCallQueue_();
22483 }
22484 };
22485
22486 _proto.handleTimingInfo_ = function handleTimingInfo_(simpleSegment, mediaType, timeType, time) {
22487 this.earlyAbortWhenNeeded_(simpleSegment.stats);
22488
22489 if (this.checkForAbort_(simpleSegment.requestId)) {
22490 return;
22491 }
22492
22493 var segmentInfo = this.pendingSegment_;
22494 var timingInfoProperty = timingInfoPropertyForMedia(mediaType);
22495 segmentInfo[timingInfoProperty] = segmentInfo[timingInfoProperty] || {};
22496 segmentInfo[timingInfoProperty][timeType] = time;
22497 this.logger_("timinginfo: " + mediaType + " - " + timeType + " - " + time); // check if any calls were waiting on the timing info
22498
22499 if (this.hasEnoughInfoToAppend_()) {
22500 this.processCallQueue_();
22501 }
22502 };
22503
22504 _proto.handleCaptions_ = function handleCaptions_(simpleSegment, captionData) {
22505 var _this3 = this;
22506
22507 this.earlyAbortWhenNeeded_(simpleSegment.stats);
22508
22509 if (this.checkForAbort_(simpleSegment.requestId)) {
22510 return;
22511 } // This could only happen with fmp4 segments, but
22512 // even then it should not happen in general
22513
22514
22515 if (captionData.length === 0) {
22516 this.logger_('SegmentLoader received no captions from a caption event');
22517 return;
22518 }
22519
22520 var segmentInfo = this.pendingSegment_; // Wait until we have some video data so that caption timing
22521 // can be adjusted by the timestamp offset
22522
22523 if (!segmentInfo.hasAppendedData_) {
22524 this.metadataQueue_.caption.push(this.handleCaptions_.bind(this, simpleSegment, captionData));
22525 return;
22526 }
22527
22528 var timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset();
22529 var captionTracks = {}; // get total start/end and captions for each track/stream
22530
22531 captionData.forEach(function (caption) {
22532 // caption.stream is actually a track name...
22533 // set to the existing values in tracks or default values
22534 captionTracks[caption.stream] = captionTracks[caption.stream] || {
22535 // Infinity, as any other value will be less than this
22536 startTime: Infinity,
22537 captions: [],
22538 // 0, as any other value will be more than this
22539 endTime: 0
22540 };
22541 var captionTrack = captionTracks[caption.stream];
22542 captionTrack.startTime = Math.min(captionTrack.startTime, caption.startTime + timestampOffset);
22543 captionTrack.endTime = Math.max(captionTrack.endTime, caption.endTime + timestampOffset);
22544 captionTrack.captions.push(caption);
22545 });
22546 Object.keys(captionTracks).forEach(function (trackName) {
22547 var _captionTracks$trackN = captionTracks[trackName],
22548 startTime = _captionTracks$trackN.startTime,
22549 endTime = _captionTracks$trackN.endTime,
22550 captions = _captionTracks$trackN.captions;
22551 var inbandTextTracks = _this3.inbandTextTracks_;
22552
22553 _this3.logger_("adding cues from " + startTime + " -> " + endTime + " for " + trackName);
22554
22555 createCaptionsTrackIfNotExists(inbandTextTracks, _this3.vhs_.tech_, trackName); // clear out any cues that start and end at the same time period for the same track.
22556 // We do this because a rendition change that also changes the timescale for captions
22557 // will result in captions being re-parsed for certain segments. If we add them again
22558 // without clearing we will have two of the same captions visible.
22559
22560 removeCuesFromTrack(startTime, endTime, inbandTextTracks[trackName]);
22561 addCaptionData({
22562 captionArray: captions,
22563 inbandTextTracks: inbandTextTracks,
22564 timestampOffset: timestampOffset
22565 });
22566 }); // Reset stored captions since we added parsed
22567 // captions to a text track at this point
22568
22569 if (this.transmuxer_) {
22570 this.transmuxer_.postMessage({
22571 action: 'clearParsedMp4Captions'
22572 });
22573 }
22574 };
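  // Sketch of the per-track caption grouping done in handleCaptions_ above:
  // start times fold down from Infinity via Math.min and end times fold up
  // from 0 via Math.max, after shifting by the timestamp offset. Illustrative
  // only.
  function exampleGroupCaptionsByTrack(captionData, timestampOffset) {
    var captionTracks = {};
    captionData.forEach(function (caption) {
      captionTracks[caption.stream] = captionTracks[caption.stream] || {
        startTime: Infinity,
        captions: [],
        endTime: 0
      };
      var track = captionTracks[caption.stream];
      track.startTime = Math.min(track.startTime, caption.startTime + timestampOffset);
      track.endTime = Math.max(track.endTime, caption.endTime + timestampOffset);
      track.captions.push(caption);
    });
    return captionTracks;
  }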
22575
22576 _proto.handleId3_ = function handleId3_(simpleSegment, id3Frames, dispatchType) {
22577 this.earlyAbortWhenNeeded_(simpleSegment.stats);
22578
22579 if (this.checkForAbort_(simpleSegment.requestId)) {
22580 return;
22581 }
22582
22583 var segmentInfo = this.pendingSegment_; // we need to have appended data in order for the timestamp offset to be set
22584
22585 if (!segmentInfo.hasAppendedData_) {
22586 this.metadataQueue_.id3.push(this.handleId3_.bind(this, simpleSegment, id3Frames, dispatchType));
22587 return;
22588 }
22589
22590 var timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset(); // There's potentially an issue where we could double add metadata if there's a muxed
22591 // audio/video source with a metadata track, and an alt audio with a metadata track.
22592 // However, this probably won't happen, and if it does it can be handled then.
22593
22594 createMetadataTrackIfNotExists(this.inbandTextTracks_, dispatchType, this.vhs_.tech_);
22595 addMetadata({
22596 inbandTextTracks: this.inbandTextTracks_,
22597 metadataArray: id3Frames,
22598 timestampOffset: timestampOffset,
22599 videoDuration: this.duration_()
22600 });
22601 };
22602
22603 _proto.processMetadataQueue_ = function processMetadataQueue_() {
22604 this.metadataQueue_.id3.forEach(function (fn) {
22605 return fn();
22606 });
22607 this.metadataQueue_.caption.forEach(function (fn) {
22608 return fn();
22609 });
22610 this.metadataQueue_.id3 = [];
22611 this.metadataQueue_.caption = [];
22612 };
22613
22614 _proto.processCallQueue_ = function processCallQueue_() {
22615 var callQueue = this.callQueue_; // Clear out the queue before the queued functions are run, since some of the
22616 // functions may check the length of the load queue and default to pushing themselves
22617 // back onto the queue.
22618
22619 this.callQueue_ = [];
22620 callQueue.forEach(function (fun) {
22621 return fun();
22622 });
22623 };
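  // Sketch of the swap-then-run pattern shared by processCallQueue_ above and
  // processLoadQueue_ below: the queue is detached before running so that a
  // callback which re-enqueues itself lands in the fresh queue instead of
  // being run twice. Illustrative only.
  function exampleDrainQueue(owner, queueName) {
    var queued = owner[queueName];
    owner[queueName] = [];
    queued.forEach(function (fn) {
      return fn();
    });
  }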
22624
22625 _proto.processLoadQueue_ = function processLoadQueue_() {
22626 var loadQueue = this.loadQueue_; // Clear out the queue before the queued functions are run, since some of the
22627 // functions may check the length of the load queue and default to pushing themselves
22628 // back onto the queue.
22629
22630 this.loadQueue_ = [];
22631 loadQueue.forEach(function (fun) {
22632 return fun();
22633 });
22634 }
22635 /**
22636 * Determines whether the loader has enough info to load the next segment.
22637 *
22638 * @return {boolean}
22639 * Whether or not the loader has enough info to load the next segment
22640 */
22641 ;
22642
22643 _proto.hasEnoughInfoToLoad_ = function hasEnoughInfoToLoad_() {
22644 // Since primary timing goes by video, only the audio loader potentially needs to wait
22645 // to load.
22646 if (this.loaderType_ !== 'audio') {
22647 return true;
22648 }
22649
22650 var segmentInfo = this.pendingSegment_; // A fill buffer must have already run to establish a pending segment before there's
22651 // enough info to load.
22652
22653 if (!segmentInfo) {
22654 return false;
22655 } // The first segment can and should be loaded immediately so that source buffers are
22656 // created together (before appending). Source buffer creation uses the presence of
22657 // audio and video data to determine whether to create audio/video source buffers, and
22658 // uses processed (transmuxed or parsed) media to determine the types required.
22659
22660
22661 if (!this.currentMediaInfo_) {
22662 return true;
22663 }
22664
22665 if ( // Technically, instead of waiting to load a segment on timeline changes, a segment
22666 // can be requested and downloaded and only wait before it is transmuxed or parsed.
22667 // But in practice, there are a few reasons why it is better to wait until a loader
22668 // is ready to append that segment before requesting and downloading:
22669 //
22670 // 1. Because audio and main loaders cross discontinuities together, if this loader
22671 // is waiting for the other to catch up, then instead of requesting another
22672 // segment and using up more bandwidth, by not yet loading, more bandwidth is
22673 // allotted to the loader currently behind.
22674 // 2. media-segment-request doesn't have to have logic to consider whether a segment
22675 // is ready to be processed or not, isolating the queueing behavior to the loader.
22676 // 3. The audio loader bases some of its segment properties on timing information
22677 // provided by the main loader, meaning that, if the logic for waiting on
22678 // processing was in media-segment-request, then it would also need to know how
22679 // to re-generate the segment information after the main loader caught up.
22680 shouldWaitForTimelineChange({
22681 timelineChangeController: this.timelineChangeController_,
22682 currentTimeline: this.currentTimeline_,
22683 segmentTimeline: segmentInfo.timeline,
22684 loaderType: this.loaderType_,
22685 audioDisabled: this.audioDisabled_
22686 })) {
22687 return false;
22688 }
22689
22690 return true;
22691 };
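  // Hypothetical sketch of the kind of check shouldWaitForTimelineChange
  // performs for the audio loader in hasEnoughInfoToLoad_ above (the real
  // helper lives elsewhere in this bundle and also covers the main loader):
  // audio holds back until the main loader has registered a change onto the
  // segment's timeline, so both loaders cross discontinuities together.
  function exampleAudioShouldWait(currentTimeline, segmentTimeline, mainTimelineChange) {
    if (currentTimeline === segmentTimeline) {
      return false;
    }

    return !mainTimelineChange || mainTimelineChange.to !== segmentTimeline;
  }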
22692
22693 _proto.hasEnoughInfoToAppend_ = function hasEnoughInfoToAppend_() {
22694 if (!this.sourceUpdater_.ready()) {
22695 return false;
22696 } // If content needs to be removed or the loader is waiting on an append reattempt,
22697 // then no additional content should be appended until the prior append is resolved.
22698
22699
22700 if (this.waitingOnRemove_ || this.quotaExceededErrorRetryTimeout_) {
22701 return false;
22702 }
22703
22704 var segmentInfo = this.pendingSegment_; // no segment to append any data for or
22705 // we do not have information on this specific
22706 // segment yet
22707
22708 if (!segmentInfo || !segmentInfo.trackInfo) {
22709 return false;
22710 }
22711
22712 if (!this.handlePartialData_) {
22713 var _this$currentMediaInf = this.currentMediaInfo_,
22714 hasAudio = _this$currentMediaInf.hasAudio,
22715 hasVideo = _this$currentMediaInf.hasVideo,
22716 isMuxed = _this$currentMediaInf.isMuxed;
22717
22718 if (hasVideo && !segmentInfo.videoTimingInfo) {
22719 return false;
22720 } // muxed content only relies on video timing information for now.
22721
22722
22723 if (hasAudio && !this.audioDisabled_ && !isMuxed && !segmentInfo.audioTimingInfo) {
22724 return false;
22725 }
22726 }
22727
22728 if (shouldWaitForTimelineChange({
22729 timelineChangeController: this.timelineChangeController_,
22730 currentTimeline: this.currentTimeline_,
22731 segmentTimeline: segmentInfo.timeline,
22732 loaderType: this.loaderType_,
22733 audioDisabled: this.audioDisabled_
22734 })) {
22735 return false;
22736 }
22737
22738 return true;
22739 };
22740
22741 _proto.handleData_ = function handleData_(simpleSegment, result) {
22742 this.earlyAbortWhenNeeded_(simpleSegment.stats);
22743
22744 if (this.checkForAbort_(simpleSegment.requestId)) {
22745 return;
22746 } // If there's anything in the call queue, then this data came later and should be
22747 // executed after the calls currently queued.
22748
22749
22750 if (this.callQueue_.length || !this.hasEnoughInfoToAppend_()) {
22751 this.callQueue_.push(this.handleData_.bind(this, simpleSegment, result));
22752 return;
22753 }
22754
22755 var segmentInfo = this.pendingSegment_; // update the time mapping so we can translate from display time to media time
22756
22757 this.setTimeMapping_(segmentInfo.timeline); // for tracking overall stats
22758
22759 this.updateMediaSecondsLoaded_(segmentInfo.segment); // Note that the state isn't changed from loading to appending. This is because abort
22760 // logic may change behavior depending on the state, and changing state too early may
22761 // inflate our estimates of bandwidth. In the future this should be re-examined to
22762 // use more granular states.
22763 // don't process and append data if the mediaSource is closed
22764
22765 if (this.mediaSource_.readyState === 'closed') {
22766 return;
22767 } // if this request included an initialization segment, save that data
22768 // to the initSegment cache
22769
22770
22771 if (simpleSegment.map) {
22772 simpleSegment.map = this.initSegmentForMap(simpleSegment.map, true); // move over init segment properties to media request
22773
22774 segmentInfo.segment.map = simpleSegment.map;
22775 } // if this request included a segment key, save that data in the cache
22776
22777
22778 if (simpleSegment.key) {
22779 this.segmentKey(simpleSegment.key, true);
22780 }
22781
22782 segmentInfo.isFmp4 = simpleSegment.isFmp4;
22783 segmentInfo.timingInfo = segmentInfo.timingInfo || {};
22784
22785 if (segmentInfo.isFmp4) {
22786 this.trigger('fmp4');
22787 segmentInfo.timingInfo.start = segmentInfo[timingInfoPropertyForMedia(result.type)].start;
22788 } else {
22789 var useVideoTimingInfo = this.loaderType_ === 'main' && this.currentMediaInfo_.hasVideo;
22790 var firstVideoFrameTimeForData;
22791
22792 if (useVideoTimingInfo) {
22793 firstVideoFrameTimeForData = this.handlePartialData_ ? result.videoFramePtsTime : segmentInfo.videoTimingInfo.start;
22794 } // Segment loader knows more about segment timing than the transmuxer (in certain
22795 // aspects), so make any changes required for a more accurate start time.
22796 // Don't set the end time yet, as the segment may not be finished processing.
22797
22798
22799 segmentInfo.timingInfo.start = this.trueSegmentStart_({
22800 currentStart: segmentInfo.timingInfo.start,
22801 playlist: segmentInfo.playlist,
22802 mediaIndex: segmentInfo.mediaIndex,
22803 currentVideoTimestampOffset: this.sourceUpdater_.videoTimestampOffset(),
22804 useVideoTimingInfo: useVideoTimingInfo,
22805 firstVideoFrameTimeForData: firstVideoFrameTimeForData,
22806 videoTimingInfo: segmentInfo.videoTimingInfo,
22807 audioTimingInfo: segmentInfo.audioTimingInfo
22808 });
22809 } // Init segments for audio and video only need to be appended in certain cases. Now
22810 // that data is about to be appended, we can check the final cases to determine
22811 // whether we should append an init segment.
22812
22813
22814 this.updateAppendInitSegmentStatus(segmentInfo, result.type); // Timestamp offset should be updated once we get new data and have its timing info,
22815 // as we use the start of the segment to offset the best guess (playlist provided)
22816 // timestamp offset.
22817
22818 this.updateSourceBufferTimestampOffset_(segmentInfo); // Save some state so that in the future anything waiting on first append (and/or
22819 // timestamp offset(s)) can process immediately. While the extra state isn't optimal,
22820 // we need some notion of whether the timestamp offset or other relevant information
22821 // has had a chance to be set.
22822
22823 segmentInfo.hasAppendedData_ = true; // Now that the timestamp offset should be set, we can append any waiting ID3 tags.
22824
22825 this.processMetadataQueue_();
22826 this.appendData_(segmentInfo, result);
22827 };
22828
22829 _proto.updateAppendInitSegmentStatus = function updateAppendInitSegmentStatus(segmentInfo, type) {
22830 // alt audio doesn't manage timestamp offset
22831 if (this.loaderType_ === 'main' && typeof segmentInfo.timestampOffset === 'number' && // in the case that we're handling partial data, we don't want to append an init
22832 // segment for each chunk
22833 !segmentInfo.changedTimestampOffset) {
22834 // if the timestamp offset changed, the timeline may have changed, so we have to re-
22835 // append init segments
22836 this.appendInitSegment_ = {
22837 audio: true,
22838 video: true
22839 };
22840 }
22841
22842 if (this.playlistOfLastInitSegment_[type] !== segmentInfo.playlist) {
22843 // make sure we append init segment on playlist changes, in case the media config
22844 // changed
22845 this.appendInitSegment_[type] = true;
22846 }
22847 };
22848
22849 _proto.getInitSegmentAndUpdateState_ = function getInitSegmentAndUpdateState_(_ref4) {
22850 var type = _ref4.type,
22851 initSegment = _ref4.initSegment,
22852 map = _ref4.map,
22853 playlist = _ref4.playlist;
22854
22855 // "The EXT-X-MAP tag specifies how to obtain the Media Initialization Section
22856 // (Section 3) required to parse the applicable Media Segments. It applies to every
22857 // Media Segment that appears after it in the Playlist until the next EXT-X-MAP tag
22858 // or until the end of the playlist."
22859 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.2.5
22860 if (map) {
22861 var id = initSegmentId(map);
22862
22863 if (this.activeInitSegmentId_ === id) {
22864 // don't need to re-append the init segment if the ID matches
22865 return null;
22866 } // a map-specified init segment takes priority over any transmuxed (or otherwise
22867 // obtained) init segment
22868 //
22869 // this also caches the init segment for later use
22870
22871
22872 initSegment = this.initSegmentForMap(map, true).bytes;
22873 this.activeInitSegmentId_ = id;
22874 } // We used to always prepend init segments for video, however, that shouldn't be
22875 // necessary. Instead, we should only append on changes, similar to what we've always
22876 // done for audio. This is more important (though may not be that important) for
22877 // frame-by-frame appending for LHLS, simply because of the increased quantity of
22878 // appends.
22879
22880
22881 if (initSegment && this.appendInitSegment_[type]) {
22882 // Make sure we track the playlist that we last used for the init segment, so that
22883 // we can re-append the init segment in the event that we get data from a new
22884 // playlist. Discontinuities and track changes are handled in other sections.
22885 this.playlistOfLastInitSegment_[type] = playlist; // we should only be appending the next init segment if we detect a change, or if
22886 // the segment has a map
22887
22888 this.appendInitSegment_[type] = map ? true : false; // we need to clear out the fmp4 active init segment id, since
22889 // we are appending the muxer init segment
22890
22891 this.activeInitSegmentId_ = null;
22892 return initSegment;
22893 }
22894
22895 return null;
22896 };
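  // Condensed sketch of the decision implemented by
  // getInitSegmentAndUpdateState_ above: a map-specified (EXT-X-MAP) init
  // segment is skipped when its id matches the active one, and otherwise an
  // init segment is appended only while the per-type append flag is set.
  // Illustrative only.
  function exampleShouldAppendInitSegment(activeId, mapId, appendFlagForType) {
    if (mapId && mapId === activeId) {
      return false; // the same EXT-X-MAP init segment is already active
    }

    return appendFlagForType;
  }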
22897
22898 _proto.handleQuotaExceededError_ = function handleQuotaExceededError_(_ref5, error) {
22899 var _this4 = this;
22900
22901 var segmentInfo = _ref5.segmentInfo,
22902 type = _ref5.type,
22903 bytes = _ref5.bytes;
22904 var audioBuffered = this.sourceUpdater_.audioBuffered();
22905 var videoBuffered = this.sourceUpdater_.videoBuffered(); // For now we're ignoring any notion of gaps in the buffer, but they, in theory,
22906 // should be cleared out during the buffer removals. However, log in case it helps
22907 // debug.
22908
22909 if (audioBuffered.length > 1) {
22910 this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the audio buffer: ' + timeRangesToArray(audioBuffered).join(', '));
22911 }
22912
22913 if (videoBuffered.length > 1) {
22914 this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the video buffer: ' + timeRangesToArray(videoBuffered).join(', '));
22915 }
22916
22917 var audioBufferStart = audioBuffered.length ? audioBuffered.start(0) : 0;
22918 var audioBufferEnd = audioBuffered.length ? audioBuffered.end(audioBuffered.length - 1) : 0;
22919 var videoBufferStart = videoBuffered.length ? videoBuffered.start(0) : 0;
22920 var videoBufferEnd = videoBuffered.length ? videoBuffered.end(videoBuffered.length - 1) : 0;
22921
22922 if (audioBufferEnd - audioBufferStart <= MIN_BACK_BUFFER && videoBufferEnd - videoBufferStart <= MIN_BACK_BUFFER) {
22923 // Can't remove enough buffer to make room for new segment (or the browser doesn't
22924 // allow for appends of segments this size). In the future, it may be possible to
22925 // split up the segment and append in pieces, but for now, error out this playlist
22926 // in an attempt to switch to a more manageable rendition.
22927 this.logger_('On QUOTA_EXCEEDED_ERR, single segment too large to append to ' + 'buffer, triggering an error. ' + ("Appended byte length: " + bytes.byteLength + ", ") + ("audio buffer: " + timeRangesToArray(audioBuffered).join(', ') + ", ") + ("video buffer: " + timeRangesToArray(videoBuffered).join(', ') + ", "));
22928 this.error({
22929 message: 'Quota exceeded error with append of a single segment of content',
22930 // To prevent any possible repeated downloads for content we can't actually
22931 // append, blacklist forever.
22932 blacklistDuration: Infinity
22933 });
22934 this.trigger('error');
22935 return;
22936 } // To try to resolve the quota exceeded error, clear back buffer and retry. This means
22937 // that the segment-loader should block on future events until this one is handled, so
22938 // that it doesn't keep moving onto further segments. Adding the call to the call
22939 // queue will prevent further appends until waitingOnRemove_ and
22940 // quotaExceededErrorRetryTimeout_ are cleared.
22941 //
22942 // Note that this will only block the current loader. In the case of demuxed content,
22943 // the other load may keep filling as fast as possible. In practice, this should be
22944 // OK, as it is a rare case when either audio has a high enough bitrate to fill up a
22945 // source buffer, or video fills without enough room for audio to append (and without
22946 // the availability of clearing out seconds of back buffer to make room for audio).
22947 // But it might still be good to handle this case in the future as a TODO.
22948
22949
22950 this.waitingOnRemove_ = true;
22951 this.callQueue_.push(this.appendToSourceBuffer_.bind(this, {
22952 segmentInfo: segmentInfo,
22953 type: type,
22954 bytes: bytes
22955 }));
22956 var currentTime = this.currentTime_(); // Try to remove as much audio and video as possible to make room for new content
22957 // before retrying.
22958
22959 var timeToRemoveUntil = currentTime - MIN_BACK_BUFFER;
22960 this.logger_("On QUOTA_EXCEEDED_ERR, removing audio/video from 0 to " + timeToRemoveUntil);
22961 this.remove(0, timeToRemoveUntil, function () {
22962 _this4.logger_("On QUOTA_EXCEEDED_ERR, retrying append in " + MIN_BACK_BUFFER + "s");
22963
22964 _this4.waitingOnRemove_ = false; // wait the length of time allotted in the back buffer to prevent wasted
22965 // attempts (since we can't clear less than the minimum)
22966
22967 _this4.quotaExceededErrorRetryTimeout_ = window__default['default'].setTimeout(function () {
22968 _this4.logger_('On QUOTA_EXCEEDED_ERR, re-processing call queue');
22969
22970 _this4.quotaExceededErrorRetryTimeout_ = null;
22971
22972 _this4.processCallQueue_();
22973 }, MIN_BACK_BUFFER * 1000);
22974 }, true);
22975 };
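  // Sketch of the "can we even make room?" test in handleQuotaExceededError_
  // above, with MIN_BACK_BUFFER passed in rather than read from the bundle's
  // constant. If neither buffer spans more than the minimum back buffer,
  // removal cannot free enough space and the rendition is errored out instead.
  function exampleCanFreeBufferRoom(audioBuffered, videoBuffered, minBackBuffer) {
    var span = function span(buffered) {
      return buffered.length ? buffered.end(buffered.length - 1) - buffered.start(0) : 0;
    };

    return span(audioBuffered) > minBackBuffer || span(videoBuffered) > minBackBuffer;
  }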
22976
22977 _proto.handleAppendError_ = function handleAppendError_(_ref6, error) {
22978 var segmentInfo = _ref6.segmentInfo,
22979 type = _ref6.type,
22980 bytes = _ref6.bytes;
22981
22982 // if there's no error, nothing to do
22983 if (!error) {
22984 return;
22985 }
22986
22987 if (error.code === QUOTA_EXCEEDED_ERR) {
22988 this.handleQuotaExceededError_({
22989 segmentInfo: segmentInfo,
22990 type: type,
22991 bytes: bytes
22992 }); // A quota exceeded error should be recoverable with a future re-append, so no need
22993 // to trigger an append error.
22994
22995 return;
22996 }
22997
22998 this.logger_('Received non QUOTA_EXCEEDED_ERR on append', error);
22999 this.error(type + " append of " + bytes.length + "b failed for segment " + ("#" + segmentInfo.mediaIndex + " in playlist " + segmentInfo.playlist.id)); // If an append errors, we often can't recover.
23000 // (see https://w3c.github.io/media-source/#sourcebuffer-append-error).
23001 //
23002 // Trigger a special error so that it can be handled separately from normal,
23003 // recoverable errors.
23004
23005 this.trigger('appenderror');
23006 };
23007
23008 _proto.appendToSourceBuffer_ = function appendToSourceBuffer_(_ref7) {
23009 var segmentInfo = _ref7.segmentInfo,
23010 type = _ref7.type,
23011 initSegment = _ref7.initSegment,
23012 data = _ref7.data,
23013 bytes = _ref7.bytes;
23014
23015 // If this is a re-append, bytes were already created and don't need to be recreated
23016 if (!bytes) {
23017 var segments = [data];
23018 var byteLength = data.byteLength;
23019
23020 if (initSegment) {
23021 // if the media initialization segment is changing, append it before the content
23022 // segment
23023 segments.unshift(initSegment);
23024 byteLength += initSegment.byteLength;
23025 } // Technically we should be OK appending the init segment separately, however, we
23026 // haven't yet tested that, and prepending is how we have always done things.
23027
23028
23029 bytes = concatSegments({
23030 bytes: byteLength,
23031 segments: segments
23032 });
23033 }
23034
23035 this.sourceUpdater_.appendBuffer({
23036 segmentInfo: segmentInfo,
23037 type: type,
23038 bytes: bytes
23039 }, this.handleAppendError_.bind(this, {
23040 segmentInfo: segmentInfo,
23041 type: type,
23042 bytes: bytes
23043 }));
23044 };
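  // Sketch of what concatSegments does for appendToSourceBuffer_ above when a
  // changed init segment must be prepended to the media data in one append.
  // Assumes both inputs are Uint8Arrays; concatSegments itself is defined
  // elsewhere in this bundle.
  function exampleConcatInitAndData(initSegment, data) {
    var bytes = new Uint8Array(initSegment.byteLength + data.byteLength);
    bytes.set(initSegment, 0);
    bytes.set(data, initSegment.byteLength);
    return bytes;
  }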
23045
23046 _proto.handleSegmentTimingInfo_ = function handleSegmentTimingInfo_(type, requestId, segmentTimingInfo) {
23047 if (!this.pendingSegment_ || requestId !== this.pendingSegment_.requestId) {
23048 return;
23049 }
23050
23051 var segment = this.pendingSegment_.segment;
23052 var timingInfoProperty = type + "TimingInfo";
23053
23054 if (!segment[timingInfoProperty]) {
23055 segment[timingInfoProperty] = {};
23056 }
23057
23058 segment[timingInfoProperty].transmuxerPrependedSeconds = segmentTimingInfo.prependedContentDuration || 0;
23059 segment[timingInfoProperty].transmuxedPresentationStart = segmentTimingInfo.start.presentation;
23060 segment[timingInfoProperty].transmuxedDecodeStart = segmentTimingInfo.start.decode;
23061 segment[timingInfoProperty].transmuxedPresentationEnd = segmentTimingInfo.end.presentation;
23062 segment[timingInfoProperty].transmuxedDecodeEnd = segmentTimingInfo.end.decode; // mainly used as a reference for debugging
23063
23064 segment[timingInfoProperty].baseMediaDecodeTime = segmentTimingInfo.baseMediaDecodeTime;
23065 };
23066
23067 _proto.appendData_ = function appendData_(segmentInfo, result) {
23068 var type = result.type,
23069 data = result.data;
23070
23071 if (!data || !data.byteLength) {
23072 return;
23073 }
23074
23075 if (type === 'audio' && this.audioDisabled_) {
23076 return;
23077 }
23078
23079 var initSegment = this.getInitSegmentAndUpdateState_({
23080 type: type,
23081 initSegment: result.initSegment,
23082 playlist: segmentInfo.playlist,
23083 map: segmentInfo.isFmp4 ? segmentInfo.segment.map : null
23084 });
23085 this.appendToSourceBuffer_({
23086 segmentInfo: segmentInfo,
23087 type: type,
23088 initSegment: initSegment,
23089 data: data
23090 });
23091 }
23092 /**
23093 * load a specific segment from a request into the buffer
23094 *
23095 * @private
23096 */
23097 ;
23098
23099 _proto.loadSegment_ = function loadSegment_(segmentInfo) {
23100 var _this5 = this;
23101
23102 this.state = 'WAITING';
23103 this.pendingSegment_ = segmentInfo;
23104 this.trimBackBuffer_(segmentInfo);
23105
23106 if (typeof segmentInfo.timestampOffset === 'number') {
23107 if (this.transmuxer_) {
23108 this.transmuxer_.postMessage({
23109 action: 'clearAllMp4Captions'
23110 });
23111 }
23112 }
23113
23114 if (!this.hasEnoughInfoToLoad_()) {
23115 this.loadQueue_.push(function () {
23116 var buffered = _this5.buffered_();
23117
23118 if (typeof segmentInfo.timestampOffset === 'number') {
23119 // The timestamp offset needs to be regenerated, as the buffer most likely
23120 // changed since the function was added to the queue. This is expected, as the
23121 // load is usually pending the main loader appending new segments.
23122 //
23123 // Note also that the overrideCheck property is set to true. This is because
23124 // isPendingTimestampOffset is set back to false after the first set of the
23125 // timestamp offset (before it was added to the queue). But the presence of
23126 // timestamp offset as a property of segmentInfo serves as enough evidence that
23127 // it should be regenerated.
23128 segmentInfo.timestampOffset = timestampOffsetForSegment({
23129 segmentTimeline: segmentInfo.timeline,
23130 currentTimeline: _this5.currentTimeline_,
23131 startOfSegment: segmentInfo.startOfSegment,
23132 buffered: buffered,
23133 overrideCheck: true
23134 });
23135 }
23136
23137 delete segmentInfo.audioAppendStart;
23138
23139 var audioBuffered = _this5.sourceUpdater_.audioBuffered();
23140
23141 if (audioBuffered.length) {
23142 // Because the audio timestamp offset may have been changed by the main loader,
23143 // the audioAppendStart should be regenerated.
23144 //
23145 // Since the transmuxer is using the actual timing values, but the buffer is
23146 // adjusted by the timestamp offset, the value must be adjusted.
23147 segmentInfo.audioAppendStart = audioBuffered.end(audioBuffered.length - 1) - _this5.sourceUpdater_.audioTimestampOffset();
23148 }
23149
23150 _this5.updateTransmuxerAndRequestSegment_(segmentInfo);
23151 });
23152 return;
23153 }
23154
23155 this.updateTransmuxerAndRequestSegment_(segmentInfo);
23156 };
23157
23158 _proto.updateTransmuxerAndRequestSegment_ = function updateTransmuxerAndRequestSegment_(segmentInfo) {
23159 var _this6 = this;
23160
23161 // We'll update the source buffer's timestamp offset once we have transmuxed data, but
23162 // the transmuxer still needs to be updated before then.
23163 //
23164 // Even though keepOriginalTimestamps is set to true for the transmuxer, timestamp
23165 // offset must be passed to the transmuxer for stream correcting adjustments.
23166 if (this.shouldUpdateTransmuxerTimestampOffset_(segmentInfo.timestampOffset)) {
23167 this.gopBuffer_.length = 0; // gopsToAlignWith was set before the GOP buffer was cleared
23168
23169 segmentInfo.gopsToAlignWith = [];
23170 this.timeMapping_ = 0; // reset values in the transmuxer since a discontinuity should start fresh
23171
23172 this.transmuxer_.postMessage({
23173 action: 'reset'
23174 });
23175 this.transmuxer_.postMessage({
23176 action: 'setTimestampOffset',
23177 timestampOffset: segmentInfo.timestampOffset
23178 });
23179 }
23180
23181 var simpleSegment = this.createSimplifiedSegmentObj_(segmentInfo);
23182 var isEndOfStream = this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex);
23183 var isWalkingForward = this.mediaIndex !== null;
23184 var isDiscontinuity = segmentInfo.timeline !== this.currentTimeline_ && // currentTimeline starts at -1, so we shouldn't end the timeline switching to 0,
23185 // the first timeline
23186 segmentInfo.timeline > 0;
23187 var isEndOfTimeline = isEndOfStream || isWalkingForward && isDiscontinuity;
23188 this.logger_("Requesting " + segmentInfoString(segmentInfo));
23189 segmentInfo.abortRequests = mediaSegmentRequest({
23190 xhr: this.vhs_.xhr,
23191 xhrOptions: this.xhrOptions_,
23192 decryptionWorker: this.decrypter_,
23193 segment: simpleSegment,
23194 handlePartialData: this.handlePartialData_,
23195 abortFn: this.handleAbort_.bind(this, segmentInfo),
23196 progressFn: this.handleProgress_.bind(this),
23197 trackInfoFn: this.handleTrackInfo_.bind(this),
23198 timingInfoFn: this.handleTimingInfo_.bind(this),
23199 videoSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'video', segmentInfo.requestId),
23200 audioSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'audio', segmentInfo.requestId),
23201 captionsFn: this.handleCaptions_.bind(this),
23202 isEndOfTimeline: isEndOfTimeline,
23203 endedTimelineFn: function endedTimelineFn() {
23204 _this6.logger_('received endedtimeline callback');
23205 },
23206 id3Fn: this.handleId3_.bind(this),
23207 dataFn: this.handleData_.bind(this),
23208 doneFn: this.segmentRequestFinished_.bind(this)
23209 });
23210 }
23211 /**
23212 * trim the back buffer so that we don't have too much data
23213 * in the source buffer
23214 *
23215 * @private
23216 *
23217 * @param {Object} segmentInfo - the current segment
23218 */
23219 ;
23220
23221 _proto.trimBackBuffer_ = function trimBackBuffer_(segmentInfo) {
23222 var removeToTime = safeBackBufferTrimTime(this.seekable_(), this.currentTime_(), this.playlist_.targetDuration || 10); // Chrome has a hard limit of 150MB of
23223 // buffer and a very conservative "garbage collector"
23224 // We manually clear out the old buffer to ensure
23225 // we don't trigger the QuotaExceeded error
23226 // on the source buffer during subsequent appends
23227
23228 if (removeToTime > 0) {
23229 this.remove(0, removeToTime);
23230 }
23231 }
23232 /**
23233 * create a simplified copy of the segment object with just the
23234 * information necessary to perform the XHR and decryption
23235 *
23236 * @private
23237 *
23238 * @param {Object} segmentInfo - the current segment
23239 * @return {Object} a simplified segment object copy
23240 */
23241 ;
23242
23243 _proto.createSimplifiedSegmentObj_ = function createSimplifiedSegmentObj_(segmentInfo) {
23244 var segment = segmentInfo.segment;
23245 var part = segmentInfo.part;
23246 var simpleSegment = {
23247 resolvedUri: part ? part.resolvedUri : segment.resolvedUri,
23248 byterange: part ? part.byterange : segment.byterange,
23249 requestId: segmentInfo.requestId,
23250 transmuxer: segmentInfo.transmuxer,
23251 audioAppendStart: segmentInfo.audioAppendStart,
23252 gopsToAlignWith: segmentInfo.gopsToAlignWith,
23253 part: segmentInfo.part
23254 };
23255 var previousSegment = segmentInfo.playlist.segments[segmentInfo.mediaIndex - 1];
23256
23257 if (previousSegment && previousSegment.timeline === segment.timeline) {
23258 // The baseStartTime of a segment is used to handle rollover when probing the TS
23259 // segment to retrieve timing information. Since the probe only looks at the media's
23260 // times (e.g., PTS and DTS values of the segment), and doesn't consider the
23261 // player's time (e.g., player.currentTime()), baseStartTime should reflect the
23262 // media time as well. transmuxedDecodeEnd represents the end time of a segment, in
23263 // seconds of media time, so should be used here. The previous segment is used since
23264 // the end of the previous segment should represent the beginning of the current
23265 // segment, so long as they are on the same timeline.
23266 if (previousSegment.videoTimingInfo) {
23267 simpleSegment.baseStartTime = previousSegment.videoTimingInfo.transmuxedDecodeEnd;
23268 } else if (previousSegment.audioTimingInfo) {
23269 simpleSegment.baseStartTime = previousSegment.audioTimingInfo.transmuxedDecodeEnd;
23270 }
23271 }
23272
23273 if (segment.key) {
23274 // if the media sequence is greater than 2^32, the IV will be incorrect
23275 // assuming 10s segments, that would be about 1300 years
23276 var iv = segment.key.iv || new Uint32Array([0, 0, 0, segmentInfo.mediaIndex + segmentInfo.playlist.mediaSequence]);
23277 simpleSegment.key = this.segmentKey(segment.key);
23278 simpleSegment.key.iv = iv;
23279 }
23280
23281 if (segment.map) {
23282 simpleSegment.map = this.initSegmentForMap(segment.map);
23283 }
23284
23285 return simpleSegment;
23286 };
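  // Sketch of the default AES-128 IV derivation used by
  // createSimplifiedSegmentObj_ above: per the HLS spec, when a key carries no
  // explicit IV, the segment's media sequence number is used, filling the
  // final 32-bit word of the 16-byte IV. Illustrative only.
  function exampleSegmentIv(key, mediaIndex, mediaSequence) {
    return key.iv || new Uint32Array([0, 0, 0, mediaIndex + mediaSequence]);
  }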
23287
23288 _proto.saveTransferStats_ = function saveTransferStats_(stats) {
23289 // every request counts as a media request even if it has been aborted
23290 // or canceled due to a timeout
23291 this.mediaRequests += 1;
23292
23293 if (stats) {
23294 this.mediaBytesTransferred += stats.bytesReceived;
23295 this.mediaTransferDuration += stats.roundTripTime;
23296 }
23297 };
23298
23299 _proto.saveBandwidthRelatedStats_ = function saveBandwidthRelatedStats_(duration, stats) {
23300 // byteLength will be used for throughput, and should be based on bytes received,
23301 // which we only know at the end of the request and should reflect total bytes
23302 // downloaded rather than just bytes processed from components of the segment
23303 this.pendingSegment_.byteLength = stats.bytesReceived;
23304
23305 if (duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
23306 this.logger_("Ignoring segment's bandwidth because its duration of " + duration + (" is less than the min to record " + MIN_SEGMENT_DURATION_TO_SAVE_STATS));
23307 return;
23308 }
23309
23310 this.bandwidth = stats.bandwidth;
23311 this.roundTrip = stats.roundTripTime;
23312 };
23313
23314 _proto.handleTimeout_ = function handleTimeout_() {
23315 // although the VTT segment loader bandwidth isn't really used, it's good to
23316 // maintain functionality between segment loaders
23317 this.mediaRequestsTimedout += 1;
23318 this.bandwidth = 1;
23319 this.roundTrip = NaN;
23320 this.trigger('bandwidthupdate');
23321 }
23322 /**
23323 * Handle the callback from the segmentRequest function and set the
23324 * associated SegmentLoader state and errors if necessary
23325 *
23326 * @private
23327 */
23328 ;
23329
23330 _proto.segmentRequestFinished_ = function segmentRequestFinished_(error, simpleSegment, result) {
23331 // TODO handle special cases, e.g., muxed audio/video but only audio in the segment
23332 // check the call queue directly since this function doesn't need to deal with any
23333 // data, and can continue even if the source buffers are not set up and we didn't get
23334 // any data from the segment
23335 if (this.callQueue_.length) {
23336 this.callQueue_.push(this.segmentRequestFinished_.bind(this, error, simpleSegment, result));
23337 return;
23338 }
23339
23340 this.saveTransferStats_(simpleSegment.stats); // The request was aborted and the SegmentLoader has already been reset
23341
23342 if (!this.pendingSegment_) {
23343 return;
23344 } // the request was aborted and the SegmentLoader has already started
23345 // another request. this can happen when the timeout for an aborted
23346 // request triggers due to a limitation in the XHR library
23347 // do not count this as any sort of request or we risk double-counting
23348
23349
23350 if (simpleSegment.requestId !== this.pendingSegment_.requestId) {
23351 return;
23352 } // an error occurred from the active pendingSegment_ so reset everything
23353
23354
23355 if (error) {
23356 this.pendingSegment_ = null;
23357 this.state = 'READY'; // aborts are not a true error condition and nothing corrective needs to be done
23358
23359 if (error.code === REQUEST_ERRORS.ABORTED) {
23360 return;
23361 }
23362
23363 this.pause(); // the error is really just that at least one of the requests timed out
23364 // set the bandwidth to a very low value and trigger an ABR switch to
23365 // take emergency action
23366
23367 if (error.code === REQUEST_ERRORS.TIMEOUT) {
23368 this.handleTimeout_();
23369 return;
23370 } // if control-flow has arrived here, then the error is real
23371 // emit an error event to blacklist the current playlist
23372
23373
23374 this.mediaRequestsErrored += 1;
23375 this.error(error);
23376 this.trigger('error');
23377 return;
23378 }
23379
23380 var segmentInfo = this.pendingSegment_; // the response was a success so set any bandwidth stats the request
23381 // generated for ABR purposes
23382
23383 this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats);
23384 segmentInfo.endOfAllRequests = simpleSegment.endOfAllRequests;
23385
23386 if (result.gopInfo) {
23387 this.gopBuffer_ = updateGopBuffer(this.gopBuffer_, result.gopInfo, this.safeAppend_);
23388 } // Although we may have already started appending on progress, we shouldn't switch the
23389 // state away from loading until we are officially done loading the segment data.
23390
23391
23392 this.state = 'APPENDING'; // used for testing
23393
23394 this.trigger('appending');
23395 this.waitForAppendsToComplete_(segmentInfo);
23396 };
23397
23398 _proto.setTimeMapping_ = function setTimeMapping_(timeline) {
23399 var timelineMapping = this.syncController_.mappingForTimeline(timeline);
23400
23401 if (timelineMapping !== null) {
23402 this.timeMapping_ = timelineMapping;
23403 }
23404 };
23405
23406 _proto.updateMediaSecondsLoaded_ = function updateMediaSecondsLoaded_(segment) {
23407 if (typeof segment.start === 'number' && typeof segment.end === 'number') {
23408 this.mediaSecondsLoaded += segment.end - segment.start;
23409 } else {
23410 this.mediaSecondsLoaded += segment.duration;
23411 }
23412 };
23413
23414 _proto.shouldUpdateTransmuxerTimestampOffset_ = function shouldUpdateTransmuxerTimestampOffset_(timestampOffset) {
23415 if (timestampOffset === null) {
23416 return false;
23417 } // note that we're potentially using the same timestamp offset for both video and
23418 // audio
23419
23420
23421 if (this.loaderType_ === 'main' && timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
23422 return true;
23423 }
23424
23425 if (!this.audioDisabled_ && timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {
23426 return true;
23427 }
23428
23429 return false;
23430 };
23431
23432 _proto.trueSegmentStart_ = function trueSegmentStart_(_ref8) {
23433 var currentStart = _ref8.currentStart,
23434 playlist = _ref8.playlist,
23435 mediaIndex = _ref8.mediaIndex,
23436 firstVideoFrameTimeForData = _ref8.firstVideoFrameTimeForData,
23437 currentVideoTimestampOffset = _ref8.currentVideoTimestampOffset,
23438 useVideoTimingInfo = _ref8.useVideoTimingInfo,
23439 videoTimingInfo = _ref8.videoTimingInfo,
23440 audioTimingInfo = _ref8.audioTimingInfo;
23441
23442 if (typeof currentStart !== 'undefined') {
23443 // if start was set once, keep using it
23444 return currentStart;
23445 }
23446
23447 if (!useVideoTimingInfo) {
23448 return audioTimingInfo.start;
23449 }
23450
23451 var previousSegment = playlist.segments[mediaIndex - 1]; // The start of a segment should be the start of the first full frame contained
23452 // within that segment. Since the transmuxer maintains a cache of incomplete data
23453 // and/or the last frame seen, the start time may reflect a frame that starts
23454 // in the previous segment. Check for that case and ensure the start time is
23455 // accurate for the segment.
23456
23457 if (mediaIndex === 0 || !previousSegment || typeof previousSegment.start === 'undefined' || previousSegment.end !== firstVideoFrameTimeForData + currentVideoTimestampOffset) {
23458 return firstVideoFrameTimeForData;
23459 }
23460
23461 return videoTimingInfo.start;
23462 };
23463
23464 _proto.waitForAppendsToComplete_ = function waitForAppendsToComplete_(segmentInfo) {
23465 if (!this.currentMediaInfo_) {
23466 this.error({
23467 message: 'No starting media returned, likely due to an unsupported media format.',
23468 blacklistDuration: Infinity
23469 });
23470 this.trigger('error');
23471 return;
23472 } // Although transmuxing is done, appends may not yet be finished. Throw a marker
23473 // on each queue this loader is responsible for to ensure that the appends are
23474 // complete.
23475
23476
23477 var _this$currentMediaInf2 = this.currentMediaInfo_,
23478 hasAudio = _this$currentMediaInf2.hasAudio,
23479 hasVideo = _this$currentMediaInf2.hasVideo,
23480 isMuxed = _this$currentMediaInf2.isMuxed;
23481 var waitForVideo = this.loaderType_ === 'main' && hasVideo; // TODO: does this break partial support for muxed content?
23482
23483 var waitForAudio = !this.audioDisabled_ && hasAudio && !isMuxed;
23484 segmentInfo.waitingOnAppends = 0; // segments with no data
23485
23486 if (!segmentInfo.hasAppendedData_) {
23487 if (!segmentInfo.timingInfo && typeof segmentInfo.timestampOffset === 'number') {
23488 // When there's no audio or video data in the segment, there's no audio or video
23489 // timing information.
23490 //
23491 // If there's no audio or video timing information, then the timestamp offset
23492 // can't be adjusted to the appropriate value for the transmuxer and source
23493 // buffers.
23494 //
23495 // Therefore, the next segment should be used to set the timestamp offset.
23496 this.isPendingTimestampOffset_ = true;
23497 } // override settings for metadata only segments
23498
23499
23500 segmentInfo.timingInfo = {
23501 start: 0
23502 };
23503 segmentInfo.waitingOnAppends++;
23504
23505 if (!this.isPendingTimestampOffset_) {
23506 // update the timestampoffset
23507 this.updateSourceBufferTimestampOffset_(segmentInfo); // make sure the metadata queue is processed even though we have
23508 // no video/audio data.
23509
23510 this.processMetadataQueue_();
23511 } // append is "done" instantly with no data.
23512
23513
23514 this.checkAppendsDone_(segmentInfo);
23515 return;
23516 } // Since source updater could call back synchronously, do the increments first.
23517
23518
23519 if (waitForVideo) {
23520 segmentInfo.waitingOnAppends++;
23521 }
23522
23523 if (waitForAudio) {
23524 segmentInfo.waitingOnAppends++;
23525 }
23526
23527 if (waitForVideo) {
23528 this.sourceUpdater_.videoQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
23529 }
23530
23531 if (waitForAudio) {
23532 this.sourceUpdater_.audioQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
23533 }
23534 };
23535
23536 _proto.checkAppendsDone_ = function checkAppendsDone_(segmentInfo) {
23537 if (this.checkForAbort_(segmentInfo.requestId)) {
23538 return;
23539 }
23540
23541 segmentInfo.waitingOnAppends--;
23542
23543 if (segmentInfo.waitingOnAppends === 0) {
23544 this.handleAppendsDone_();
23545 }
23546 };
23547
23548 _proto.checkForIllegalMediaSwitch = function checkForIllegalMediaSwitch(trackInfo) {
23549 var illegalMediaSwitchError = illegalMediaSwitch(this.loaderType_, this.currentMediaInfo_, trackInfo);
23550
23551 if (illegalMediaSwitchError) {
23552 this.error({
23553 message: illegalMediaSwitchError,
23554 blacklistDuration: Infinity
23555 });
23556 this.trigger('error');
23557 return true;
23558 }
23559
23560 return false;
23561 };
23562
23563 _proto.updateSourceBufferTimestampOffset_ = function updateSourceBufferTimestampOffset_(segmentInfo) {
23564 if (segmentInfo.timestampOffset === null || // we don't yet have the start for whatever media type (video or audio) has
23565 // priority, timing-wise, so we must wait
23566 typeof segmentInfo.timingInfo.start !== 'number' || // already updated the timestamp offset for this segment
23567 segmentInfo.changedTimestampOffset || // the alt audio loader should not be responsible for setting the timestamp offset
23568 this.loaderType_ !== 'main') {
23569 return;
23570 }
23571
23572 var didChange = false; // Primary timing goes by video, and audio is trimmed in the transmuxer, meaning that
23573 // the timing info here comes from video. In the event that the audio is longer than
23574 // the video, this will trim the start of the audio.
23575 // This also trims any offset from 0 at the beginning of the media
23576
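      // Illustrative example (hypothetical numbers): if this segment should land at
      // 10s in the buffer (timestampOffset === 10) and its timing info starts at 2s
      // of transmuxed media time, the offset becomes 10 - 2 = 8, so media time 2s
      // is appended at 10s.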
23577 segmentInfo.timestampOffset -= segmentInfo.timingInfo.start; // In the event that there are partial segment downloads, each will try to update the
23578 // timestamp offset. Retaining this bit of state prevents us from updating in the
23579 // future (within the same segment), however, there may be a better way to handle it.
23580
23581 segmentInfo.changedTimestampOffset = true;
23582
23583 if (segmentInfo.timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
23584 this.sourceUpdater_.videoTimestampOffset(segmentInfo.timestampOffset);
23585 didChange = true;
23586 }
23587
23588 if (segmentInfo.timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {
23589 this.sourceUpdater_.audioTimestampOffset(segmentInfo.timestampOffset);
23590 didChange = true;
23591 }
23592
23593 if (didChange) {
23594 this.trigger('timestampoffset');
23595 }
23596 };
23597
23598 _proto.updateTimingInfoEnd_ = function updateTimingInfoEnd_(segmentInfo) {
23599 segmentInfo.timingInfo = segmentInfo.timingInfo || {};
23600 var useVideoTimingInfo = this.loaderType_ === 'main' && this.currentMediaInfo_.hasVideo;
23601 var prioritizedTimingInfo = useVideoTimingInfo && segmentInfo.videoTimingInfo ? segmentInfo.videoTimingInfo : segmentInfo.audioTimingInfo;
23602
23603 if (!prioritizedTimingInfo) {
23604 return;
23605 }
23606
23607 segmentInfo.timingInfo.end = typeof prioritizedTimingInfo.end === 'number' ? // End time may not exist in a case where we aren't parsing the full segment (one
23608 // current example is the case of fmp4), so use the rough duration to calculate an
23609 // end time.
23610 prioritizedTimingInfo.end : prioritizedTimingInfo.start + segmentInfo.duration;
23611 }
23612 /**
23613 * callback to run when appendBuffer is finished. detects if we are
23614 * in a good state to do things with the data we got, or if we need
23615 * to wait for more
23616 *
23617 * @private
23618 */
23619 ;
23620
23621 _proto.handleAppendsDone_ = function handleAppendsDone_() {
23622 // appendsdone can cause an abort
23623 if (this.pendingSegment_) {
23624 this.trigger('appendsdone');
23625 }
23626
23627 if (!this.pendingSegment_) {
23628 this.state = 'READY'; // TODO should this move into this.checkForAbort to speed up requests post abort in
23629 // all appending cases?
23630
23631 if (!this.paused()) {
23632 this.monitorBuffer_();
23633 }
23634
23635 return;
23636 }
23637
23638 var segmentInfo = this.pendingSegment_; // Now that the end of the segment has been reached, we can set the end time. It's
23639 // best to wait until all appends are done so we're sure that the primary media is
23640 // finished (and we have its end time).
23641
23642 this.updateTimingInfoEnd_(segmentInfo);
23643
23644 if (this.shouldSaveSegmentTimingInfo_) {
23645 // Timeline mappings should only be saved for the main loader. This is for multiple
23646 // reasons:
23647 //
23648 // 1) Only one mapping is saved per timeline, meaning that if both the audio loader
23649 // and the main loader try to save the timeline mapping, whichever comes later
23650 // will overwrite the first. In theory this is OK, as the mappings should be the
23651 // same, however, it breaks for (2)
23652 // 2) In the event of a live stream, the initial live point will make for a somewhat
23653 // arbitrary mapping. If audio and video streams are not perfectly in-sync, then
23654 // the mapping will be off for one of the streams, dependent on which one was
23655 // first saved (see (1)).
23656 // 3) Primary timing goes by video in VHS, so the mapping should be video.
23657 //
23658 // Since the audio loader will wait for the main loader to load the first segment,
23659 // the main loader will save the first timeline mapping, and ensure that there won't
23660 // be a case where audio loads two segments without saving a mapping (thus leading
23661 // to missing segment timing info).
23662 this.syncController_.saveSegmentTimingInfo({
23663 segmentInfo: segmentInfo,
23664 shouldSaveTimelineMapping: this.loaderType_ === 'main'
23665 });
23666 }
23667
23668 this.logger_("Appended " + segmentInfoString(segmentInfo));
23669 var segmentDurationMessage = getTroublesomeSegmentDurationMessage(segmentInfo, this.sourceType_);
23670
23671 if (segmentDurationMessage) {
23672 if (segmentDurationMessage.severity === 'warn') {
23673 videojs__default['default'].log.warn(segmentDurationMessage.message);
23674 } else {
23675 this.logger_(segmentDurationMessage.message);
23676 }
23677 }
23678
23679 this.recordThroughput_(segmentInfo);
23680 this.pendingSegment_ = null;
23681 this.state = 'READY'; // TODO minor, but for partial segment downloads, this can be done earlier to save
23682 // on bandwidth and download time
23683
23684 if (segmentInfo.isSyncRequest) {
23685 this.trigger('syncinfoupdate');
23686 return;
23687 }
23688
23689 this.addSegmentMetadataCue_(segmentInfo);
23690 this.fetchAtBuffer_ = true;
23691
23692 if (this.currentTimeline_ !== segmentInfo.timeline) {
23693 this.timelineChangeController_.lastTimelineChange({
23694 type: this.loaderType_,
23695 from: this.currentTimeline_,
23696 to: segmentInfo.timeline
23697 }); // If audio is not disabled, the main segment loader is responsible for updating
23698 // the audio timeline as well. If the content is video only, this won't have any
23699 // impact.
23700
23701 if (this.loaderType_ === 'main' && !this.audioDisabled_) {
23702 this.timelineChangeController_.lastTimelineChange({
23703 type: 'audio',
23704 from: this.currentTimeline_,
23705 to: segmentInfo.timeline
23706 });
23707 }
23708 }
23709
23710 this.currentTimeline_ = segmentInfo.timeline; // We must update the syncinfo to recalculate the seekable range before
23711 // the following conditional otherwise it may consider this a bad "guess"
23712 // and attempt to resync when the post-update seekable window and live
23713 // point would mean that this was the perfect segment to fetch
23714
23715 this.trigger('syncinfoupdate');
23716 var segment = segmentInfo.segment; // If we previously appended a segment that ends more than 3 targetDurations before
23717 // the currentTime_ that means that our conservative guess was too conservative.
23718 // In that case, reset the loader state so that we try to use any information gained
23719 // from the previous request to create a new, more accurate, sync-point.
23720
23721 if (segment.end && this.currentTime_() - segment.end > segmentInfo.playlist.targetDuration * 3) {
23722 this.resetEverything();
23723 return;
23724 }
23725
23726 var isWalkingForward = this.mediaIndex !== null; // Don't do a rendition switch unless we have enough time to get a sync segment
23727 // and conservatively guess
23728
23729 if (isWalkingForward) {
23730 this.trigger('bandwidthupdate');
23731 }
23732
23733 this.trigger('progress');
23734 this.mediaIndex = segmentInfo.mediaIndex;
23735 this.partIndex = segmentInfo.partIndex; // any time an update finishes and the last segment is in the
23736 // buffer, end the stream. this ensures the "ended" event will
23737 // fire if playback reaches that point.
23738
23739 if (this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex)) {
23740 this.endOfStream();
23741 } // used for testing
23742
23743
23744 this.trigger('appended');
23745
23746 if (!this.paused()) {
23747 this.monitorBuffer_();
23748 }
23749 }
23750 /**
23751 * Records the current throughput of the decrypt, transmux, and append
23752 * portion of the segment pipeline. `throughput.rate` is the cumulative
23753 * moving average of the throughput. `throughput.count` is the number of
23754 * data points in the average.
23755 *
23756 * @private
23757 * @param {Object} segmentInfo the object returned by loadSegment
23758 */
23759 ;
23760
23761 _proto.recordThroughput_ = function recordThroughput_(segmentInfo) {
23762 if (segmentInfo.duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
23763 this.logger_("Ignoring segment's throughput because its duration of " + segmentInfo.duration + (" is less than the min to record " + MIN_SEGMENT_DURATION_TO_SAVE_STATS));
23764 return;
23765 }
23766
23767 var rate = this.throughput.rate; // Add one to the time to ensure that we don't accidentally attempt to divide
23768 // by zero in the case where the throughput is ridiculously high
23769
23770 var segmentProcessingTime = Date.now() - segmentInfo.endOfAllRequests + 1; // Multiply by 8000 to convert from bytes/millisecond to bits/second
23771
23772 var segmentProcessingThroughput = Math.floor(segmentInfo.byteLength / segmentProcessingTime * 8 * 1000); // This is just a cumulative moving average calculation:
23773 // newAvg = oldAvg + (sample - oldAvg) / (sampleCount + 1)
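      // Worked example (illustrative numbers only): a 1,000,000 byte segment
      // processed in 2,000 ms gives 1e6 / 2000 * 8 * 1000 = 4,000,000 bits/second;
      // with oldAvg = 3e6 and count = 3, newAvg = 3e6 + (4e6 - 3e6) / 4 = 3.25e6.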
23774
23775 this.throughput.rate += (segmentProcessingThroughput - rate) / ++this.throughput.count;
23776 }
23777 /**
23778 * Adds a cue to the segment-metadata track with some metadata information about the
23779 * segment
23780 *
23781 * @private
23782 * @param {Object} segmentInfo
23783 * the object returned by loadSegment
23784 * @method addSegmentMetadataCue_
23785 */
23786 ;
23787
23788 _proto.addSegmentMetadataCue_ = function addSegmentMetadataCue_(segmentInfo) {
23789 if (!this.segmentMetadataTrack_) {
23790 return;
23791 }
23792
23793 var segment = segmentInfo.segment;
23794 var start = segment.start;
23795 var end = segment.end; // Do not try adding the cue if the start and end times are invalid.
23796
23797 if (!finite(start) || !finite(end)) {
23798 return;
23799 }
23800
23801 removeCuesFromTrack(start, end, this.segmentMetadataTrack_);
23802 var Cue = window__default['default'].WebKitDataCue || window__default['default'].VTTCue;
23803 var value = {
23804 custom: segment.custom,
23805 dateTimeObject: segment.dateTimeObject,
23806 dateTimeString: segment.dateTimeString,
23807 bandwidth: segmentInfo.playlist.attributes.BANDWIDTH,
23808 resolution: segmentInfo.playlist.attributes.RESOLUTION,
23809 codecs: segmentInfo.playlist.attributes.CODECS,
23810 byteLength: segmentInfo.byteLength,
23811 uri: segmentInfo.uri,
23812 timeline: segmentInfo.timeline,
23813 playlist: segmentInfo.playlist.id,
23814 start: start,
23815 end: end
23816 };
23817 var data = JSON.stringify(value);
23818 var cue = new Cue(start, end, data); // Attach the metadata to the value property of the cue to keep consistency
23819 // between WebKitDataCue in Safari and VTTCue in other browsers
23820
23821 cue.value = value;
23822 this.segmentMetadataTrack_.addCue(cue);
23823 };
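    // Illustrative only (hypothetical values): a resulting cue value might look like
    // { bandwidth: 4000000, resolution: { width: 1280, height: 720 },
    //   codecs: 'avc1.4d401f,mp4a.40.2', byteLength: 1048576, uri: '5.ts',
    //   timeline: 0, playlist: 'playlist-720p', start: 10, end: 16 }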
23824
23825 return SegmentLoader;
23826 }(videojs__default['default'].EventTarget);
23827
23828 function noop() {}
23829
23830 var toTitleCase = function toTitleCase(string) {
23831 if (typeof string !== 'string') {
23832 return string;
23833 }
23834
23835 return string.replace(/./, function (w) {
23836 return w.toUpperCase();
23837 });
23838 };
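  // e.g. toTitleCase('video') === 'Video'; used below to build per-type handler
  // names such as "on" + toTitleCase(type) + "UpdateEnd_" (onVideoUpdateEnd_).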
23839
23840 var bufferTypes = ['video', 'audio'];
23841
23842 var _updating = function updating(type, sourceUpdater) {
23843 var sourceBuffer = sourceUpdater[type + "Buffer"];
23844 return sourceBuffer && sourceBuffer.updating || sourceUpdater.queuePending[type];
23845 };
23846
23847 var nextQueueIndexOfType = function nextQueueIndexOfType(type, queue) {
23848 for (var i = 0; i < queue.length; i++) {
23849 var queueEntry = queue[i];
23850
23851 if (queueEntry.type === 'mediaSource') {
23852 // If the next entry is a media source entry (uses multiple source buffers), block
23853 // processing to allow it to go through first.
23854 return null;
23855 }
23856
23857 if (queueEntry.type === type) {
23858 return i;
23859 }
23860 }
23861
23862 return null;
23863 };
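  // e.g. (illustrative) for queue [{type: 'audio'}, {type: 'video'}],
  // nextQueueIndexOfType('video', queue) returns 1; if a 'mediaSource' entry
  // preceded the video entry, it would return null so that entry goes first.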
23864
23865 var shiftQueue = function shiftQueue(type, sourceUpdater) {
23866 if (sourceUpdater.queue.length === 0) {
23867 return;
23868 }
23869
23870 var queueIndex = 0;
23871 var queueEntry = sourceUpdater.queue[queueIndex];
23872
23873 if (queueEntry.type === 'mediaSource') {
23874 if (!sourceUpdater.updating() && sourceUpdater.mediaSource.readyState !== 'closed') {
23875 sourceUpdater.queue.shift();
23876 queueEntry.action(sourceUpdater);
23877
23878 if (queueEntry.doneFn) {
23879 queueEntry.doneFn();
23880 } // Only specific source buffer actions must wait for async updateend events. Media
23881 // Source actions process synchronously. Therefore, both audio and video source
23882 // buffers are now clear to process the next queue entries.
23883
23884
23885 shiftQueue('audio', sourceUpdater);
23886 shiftQueue('video', sourceUpdater);
23887 } // Media Source actions require both source buffers, so if the media source action
23888 // couldn't process yet (because one or both source buffers are busy), block other
23889 // queue actions until both are available and the media source action can process.
23890
23891
23892 return;
23893 }
23894
23895 if (type === 'mediaSource') {
23896 // If the queue was shifted by a media source action (this happens when pushing a
23897 // media source action onto the queue), then it wasn't from an updateend event from an
23898 // audio or video source buffer, so there's no change from previous state, and no
23899 // processing should be done.
23900 return;
23901 } // Media source queue entries don't need to consider whether the source updater is
23902 // started (i.e., source buffers are created) as they don't need the source buffers, but
23903 // source buffer queue entries do.
23904
23905
23906 if (!sourceUpdater.ready() || sourceUpdater.mediaSource.readyState === 'closed' || _updating(type, sourceUpdater)) {
23907 return;
23908 }
23909
23910 if (queueEntry.type !== type) {
23911 queueIndex = nextQueueIndexOfType(type, sourceUpdater.queue);
23912
23913 if (queueIndex === null) {
23914 // Either there's no queue entry that uses this source buffer type in the queue, or
23915 // there's a media source queue entry before the next entry of this type, in which
23916 // case wait for that action to process first.
23917 return;
23918 }
23919
23920 queueEntry = sourceUpdater.queue[queueIndex];
23921 }
23922
23923 sourceUpdater.queue.splice(queueIndex, 1); // Keep a record that this source buffer type is in use.
23924 //
23925 // The queue pending operation must be set before the action is performed in the event
23926 // that the action results in a synchronous event that is acted upon. For instance, if
23927 // an exception is thrown that can be handled, it's possible that new actions will be
23928 // appended to an empty queue and immediately executed, but would not have the correct
23929 // pending information if this property was set after the action was performed.
23930
23931 sourceUpdater.queuePending[type] = queueEntry;
23932 queueEntry.action(type, sourceUpdater);
23933
23934 if (!queueEntry.doneFn) {
23935 // synchronous operation, process next entry
23936 sourceUpdater.queuePending[type] = null;
23937 shiftQueue(type, sourceUpdater);
23938 return;
23939 }
23940 };
23941
23942 var cleanupBuffer = function cleanupBuffer(type, sourceUpdater) {
23943 var buffer = sourceUpdater[type + "Buffer"];
23944 var titleType = toTitleCase(type);
23945
23946 if (!buffer) {
23947 return;
23948 }
23949
23950 buffer.removeEventListener('updateend', sourceUpdater["on" + titleType + "UpdateEnd_"]);
23951 buffer.removeEventListener('error', sourceUpdater["on" + titleType + "Error_"]);
23952 sourceUpdater.codecs[type] = null;
23953 sourceUpdater[type + "Buffer"] = null;
23954 };
23955
23956 var inSourceBuffers = function inSourceBuffers(mediaSource, sourceBuffer) {
23957 return mediaSource && sourceBuffer && Array.prototype.indexOf.call(mediaSource.sourceBuffers, sourceBuffer) !== -1;
23958 };
23959
23960 var actions = {
23961 appendBuffer: function appendBuffer(bytes, segmentInfo, onError) {
23962 return function (type, sourceUpdater) {
23963 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
23964 // or the media source does not contain this source buffer.
23965
23966 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
23967 return;
23968 }
23969
23970 sourceUpdater.logger_("Appending segment " + segmentInfo.mediaIndex + "'s " + bytes.length + " bytes to " + type + "Buffer");
23971
23972 try {
23973 sourceBuffer.appendBuffer(bytes);
23974 } catch (e) {
23975 sourceUpdater.logger_("Error with code " + e.code + " " + (e.code === QUOTA_EXCEEDED_ERR ? '(QUOTA_EXCEEDED_ERR) ' : '') + ("when appending segment " + segmentInfo.mediaIndex + " to " + type + "Buffer"));
23976 sourceUpdater.queuePending[type] = null;
23977 onError(e);
23978 }
23979 };
23980 },
23981 remove: function remove(start, end) {
23982 return function (type, sourceUpdater) {
23983 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
23984 // or the media source does not contain this source buffer.
23985
23986 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
23987 return;
23988 }
23989
23990 sourceUpdater.logger_("Removing " + start + " to " + end + " from " + type + "Buffer");
23991
23992 try {
23993 sourceBuffer.remove(start, end);
23994 } catch (e) {
23995 sourceUpdater.logger_("Remove " + start + " to " + end + " from " + type + "Buffer failed");
23996 }
23997 };
23998 },
23999 timestampOffset: function timestampOffset(offset) {
24000 return function (type, sourceUpdater) {
24001 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
24002 // or the media source does not contain this source buffer.
24003
24004 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
24005 return;
24006 }
24007
24008 sourceUpdater.logger_("Setting " + type + "timestampOffset to " + offset);
24009 sourceBuffer.timestampOffset = offset;
24010 };
24011 },
24012 callback: function callback(_callback) {
24013 return function (type, sourceUpdater) {
24014 _callback();
24015 };
24016 },
24017 endOfStream: function endOfStream(error) {
24018 return function (sourceUpdater) {
24019 if (sourceUpdater.mediaSource.readyState !== 'open') {
24020 return;
24021 }
24022
24023 sourceUpdater.logger_("Calling mediaSource endOfStream(" + (error || '') + ")");
24024
24025 try {
24026 sourceUpdater.mediaSource.endOfStream(error);
24027 } catch (e) {
24028 videojs__default['default'].log.warn('Failed to call media source endOfStream', e);
24029 }
24030 };
24031 },
24032 duration: function duration(_duration) {
24033 return function (sourceUpdater) {
24034 sourceUpdater.logger_("Setting mediaSource duration to " + _duration);
24035
24036 try {
24037 sourceUpdater.mediaSource.duration = _duration;
24038 } catch (e) {
24039 videojs__default['default'].log.warn('Failed to set media source duration', e);
24040 }
24041 };
24042 },
24043 abort: function abort() {
24044 return function (type, sourceUpdater) {
24045 if (sourceUpdater.mediaSource.readyState !== 'open') {
24046 return;
24047 }
24048
24049 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
24050 // or the media source does not contain this source buffer.
24051
24052 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
24053 return;
24054 }
24055
24056 sourceUpdater.logger_("calling abort on " + type + "Buffer");
24057
24058 try {
24059 sourceBuffer.abort();
24060 } catch (e) {
24061 videojs__default['default'].log.warn("Failed to abort on " + type + "Buffer", e);
24062 }
24063 };
24064 },
24065 addSourceBuffer: function addSourceBuffer(type, codec) {
24066 return function (sourceUpdater) {
24067 var titleType = toTitleCase(type);
24068 var mime = getMimeForCodec(codec);
24069 sourceUpdater.logger_("Adding " + type + "Buffer with codec " + codec + " to mediaSource");
24070 var sourceBuffer = sourceUpdater.mediaSource.addSourceBuffer(mime);
24071 sourceBuffer.addEventListener('updateend', sourceUpdater["on" + titleType + "UpdateEnd_"]);
24072 sourceBuffer.addEventListener('error', sourceUpdater["on" + titleType + "Error_"]);
24073 sourceUpdater.codecs[type] = codec;
24074 sourceUpdater[type + "Buffer"] = sourceBuffer;
24075 };
24076 },
24077 removeSourceBuffer: function removeSourceBuffer(type) {
24078 return function (sourceUpdater) {
24079 var sourceBuffer = sourceUpdater[type + "Buffer"];
24080 cleanupBuffer(type, sourceUpdater); // can't do anything if the media source / source buffer is null
24081 // or the media source does not contain this source buffer.
24082
24083 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
24084 return;
24085 }
24086
24087 sourceUpdater.logger_("Removing " + type + "Buffer with codec " + sourceUpdater.codecs[type] + " from mediaSource");
24088
24089 try {
24090 sourceUpdater.mediaSource.removeSourceBuffer(sourceBuffer);
24091 } catch (e) {
24092 videojs__default['default'].log.warn("Failed to removeSourceBuffer " + type + "Buffer", e);
24093 }
24094 };
24095 },
24096 changeType: function changeType(codec) {
24097 return function (type, sourceUpdater) {
24098 var sourceBuffer = sourceUpdater[type + "Buffer"];
24099 var mime = getMimeForCodec(codec); // can't do anything if the media source / source buffer is null
24100 // or the media source does not contain this source buffer.
24101
24102 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
24103 return;
24104 } // do not update codec if we don't need to.
24105
24106
24107 if (sourceUpdater.codecs[type] === codec) {
24108 return;
24109 }
24110
24111 sourceUpdater.logger_("changing " + type + "Buffer codec from " + sourceUpdater.codecs[type] + " to " + codec);
24112 sourceBuffer.changeType(mime);
24113 sourceUpdater.codecs[type] = codec;
24114 };
24115 }
24116 };
24117
24118 var pushQueue = function pushQueue(_ref) {
24119 var type = _ref.type,
24120 sourceUpdater = _ref.sourceUpdater,
24121 action = _ref.action,
24122 doneFn = _ref.doneFn,
24123 name = _ref.name;
24124 sourceUpdater.queue.push({
24125 type: type,
24126 action: action,
24127 doneFn: doneFn,
24128 name: name
24129 });
24130 shiftQueue(type, sourceUpdater);
24131 };
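  // For example (hypothetical values), _proto.appendBuffer below queues a video
  // append roughly like:
  //
  // pushQueue({
  //   type: 'video',
  //   sourceUpdater: this,
  //   action: actions.appendBuffer(bytes, segmentInfo, onError),
  //   doneFn: doneFn,
  //   name: 'appendBuffer'
  // });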
24132
24133 var onUpdateend = function onUpdateend(type, sourceUpdater) {
24134 return function (e) {
24135 // Although there should, in theory, be a pending action for any updateend received,
24136 // there are some actions that may trigger updateend events that are not defined in
24137 // the w3c spec. For instance, setting the duration on the media source may trigger
24138 // updateend events on source buffers. This does not appear to be in the spec. As such,
24139 // if we encounter an updateend without a corresponding pending action from our queue
24140 // for that source buffer type, process the next action.
24141 if (sourceUpdater.queuePending[type]) {
24142 var doneFn = sourceUpdater.queuePending[type].doneFn;
24143 sourceUpdater.queuePending[type] = null;
24144
24145 if (doneFn) {
24146 // if there's an error, report it
24147 doneFn(sourceUpdater[type + "Error_"]);
24148 }
24149 }
24150
24151 shiftQueue(type, sourceUpdater);
24152 };
24153 };
24154 /**
24155 * A queue of callbacks to be serialized and applied when a
24156 * MediaSource and its associated SourceBuffers are not in the
24157 * updating state. It is used by the segment loader to update the
24158 * underlying SourceBuffers when new data is loaded, for instance.
24159 *
24160 * @class SourceUpdater
24161 * @param {MediaSource} mediaSource the MediaSource to create the SourceBuffer from
24162 *
24163 */
24164
24165
24166 var SourceUpdater = /*#__PURE__*/function (_videojs$EventTarget) {
24167 inheritsLoose(SourceUpdater, _videojs$EventTarget);
24168
24169 function SourceUpdater(mediaSource) {
24170 var _this;
24171
24172 _this = _videojs$EventTarget.call(this) || this;
24173 _this.mediaSource = mediaSource;
24174
24175 _this.sourceopenListener_ = function () {
24176 return shiftQueue('mediaSource', assertThisInitialized(_this));
24177 };
24178
24179 _this.mediaSource.addEventListener('sourceopen', _this.sourceopenListener_);
24180
24181 _this.logger_ = logger('SourceUpdater'); // initial timestamp offset is 0
24182
24183 _this.audioTimestampOffset_ = 0;
24184 _this.videoTimestampOffset_ = 0;
24185 _this.queue = [];
24186 _this.queuePending = {
24187 audio: null,
24188 video: null
24189 };
24190 _this.delayedAudioAppendQueue_ = [];
24191 _this.videoAppendQueued_ = false;
24192 _this.codecs = {};
24193 _this.onVideoUpdateEnd_ = onUpdateend('video', assertThisInitialized(_this));
24194 _this.onAudioUpdateEnd_ = onUpdateend('audio', assertThisInitialized(_this));
24195
24196 _this.onVideoError_ = function (e) {
24197 // used for debugging
24198 _this.videoError_ = e;
24199 };
24200
24201 _this.onAudioError_ = function (e) {
24202 // used for debugging
24203 _this.audioError_ = e;
24204 };
24205
24206 _this.createdSourceBuffers_ = false;
24207 _this.initializedEme_ = false;
24208 _this.triggeredReady_ = false;
24209 return _this;
24210 }
24211
24212 var _proto = SourceUpdater.prototype;
24213
24214 _proto.initializedEme = function initializedEme() {
24215 this.initializedEme_ = true;
24216 this.triggerReady();
24217 };
24218
24219 _proto.hasCreatedSourceBuffers = function hasCreatedSourceBuffers() {
24220 // if false, likely waiting on one of the segment loaders to get enough data to create
24221 // source buffers
24222 return this.createdSourceBuffers_;
24223 };
24224
24225 _proto.hasInitializedAnyEme = function hasInitializedAnyEme() {
24226 return this.initializedEme_;
24227 };
24228
24229 _proto.ready = function ready() {
24230 return this.hasCreatedSourceBuffers() && this.hasInitializedAnyEme();
24231 };
24232
24233 _proto.createSourceBuffers = function createSourceBuffers(codecs) {
24234 if (this.hasCreatedSourceBuffers()) {
24235 // already created them before
24236 return;
24237 } // the initial addOrChangeSourceBuffers will always be
24238 // two addSourceBuffer calls.
24239
24240
24241 this.addOrChangeSourceBuffers(codecs);
24242 this.createdSourceBuffers_ = true;
24243 this.trigger('createdsourcebuffers');
24244 this.triggerReady();
24245 };
24246
24247 _proto.triggerReady = function triggerReady() {
24248 // only allow ready to be triggered once, this prevents the case
24249 // where:
24250 // 1. we trigger createdsourcebuffers
24251 // 2. IE 11 synchronously initializes EME
24252 // 3. the synchronous initialization causes us to trigger ready
24253 // 4. We go back to the ready check in createSourceBuffers and ready is triggered again.
24254 if (this.ready() && !this.triggeredReady_) {
24255 this.triggeredReady_ = true;
24256 this.trigger('ready');
24257 }
24258 }
24259 /**
24260 * Add a type of source buffer to the media source.
24261 *
24262 * @param {string} type
24263 * The type of source buffer to add.
24264 *
24265 * @param {string} codec
24266 * The codec to add the source buffer with.
24267 */
24268 ;
24269
24270 _proto.addSourceBuffer = function addSourceBuffer(type, codec) {
24271 pushQueue({
24272 type: 'mediaSource',
24273 sourceUpdater: this,
24274 action: actions.addSourceBuffer(type, codec),
24275 name: 'addSourceBuffer'
24276 });
24277 }
24278 /**
24279 * call abort on a source buffer.
24280 *
24281 * @param {string} type
24282 * The type of source buffer to call abort on.
24283 */
24284 ;
24285
24286 _proto.abort = function abort(type) {
24287 pushQueue({
24288 type: type,
24289 sourceUpdater: this,
24290 action: actions.abort(type),
24291 name: 'abort'
24292 });
24293 }
24294 /**
24295 * Call removeSourceBuffer and remove a specific type
24296 * of source buffer on the mediaSource.
24297 *
24298 * @param {string} type
24299 * The type of source buffer to remove.
24300 */
24301 ;
24302
24303 _proto.removeSourceBuffer = function removeSourceBuffer(type) {
24304 if (!this.canRemoveSourceBuffer()) {
24305 videojs__default['default'].log.error('removeSourceBuffer is not supported!');
24306 return;
24307 }
24308
24309 pushQueue({
24310 type: 'mediaSource',
24311 sourceUpdater: this,
24312 action: actions.removeSourceBuffer(type),
24313 name: 'removeSourceBuffer'
24314 });
24315 }
24316 /**
24317 * Whether or not the removeSourceBuffer function is supported
24318 * on the mediaSource.
24319 *
24320 * @return {boolean}
24321 * if removeSourceBuffer can be called.
24322 */
24323 ;
24324
24325 _proto.canRemoveSourceBuffer = function canRemoveSourceBuffer() {
24326 // IE reports that it supports removeSourceBuffer, but often throws
24327 // errors when attempting to use the function. So we report that it
24328 // does not support removeSourceBuffer. As of Firefox 83 removeSourceBuffer
24329 // throws errors, so we report that it does not support this as well.
24330 return !videojs__default['default'].browser.IE_VERSION && !videojs__default['default'].browser.IS_FIREFOX && window__default['default'].MediaSource && window__default['default'].MediaSource.prototype && typeof window__default['default'].MediaSource.prototype.removeSourceBuffer === 'function';
24331 }
24332 /**
24333 * Whether or not the changeType function is supported
24334 * on our SourceBuffers.
24335 *
24336 * @return {boolean}
24337 * if changeType can be called.
24338 */
24339 ;
24340
24341 SourceUpdater.canChangeType = function canChangeType() {
24342 return window__default['default'].SourceBuffer && window__default['default'].SourceBuffer.prototype && typeof window__default['default'].SourceBuffer.prototype.changeType === 'function';
24343 }
24344 /**
24345 * Whether or not the changeType function is supported
24346 * on our SourceBuffers.
24347 *
24348 * @return {boolean}
24349 * if changeType can be called.
24350 */
24351 ;
24352
24353 _proto.canChangeType = function canChangeType() {
24354 return this.constructor.canChangeType();
24355 }
24356 /**
24357 * Call the changeType function on a source buffer, given the code and type.
24358 *
24359 * @param {string} type
24360 * The type of source buffer to call changeType on.
24361 *
24362 * @param {string} codec
24363 * The codec string to change type with on the source buffer.
24364 */
24365 ;
24366
24367 _proto.changeType = function changeType(type, codec) {
24368 if (!this.canChangeType()) {
24369 videojs__default['default'].log.error('changeType is not supported!');
24370 return;
24371 }
24372
24373 pushQueue({
24374 type: type,
24375 sourceUpdater: this,
24376 action: actions.changeType(codec),
24377 name: 'changeType'
24378 });
24379 }
24380 /**
24381 * Add source buffers with a codec or, if they are already created,
24382 * call changeType on source buffers using changeType.
24383 *
24384 * @param {Object} codecs
24385 * Codecs to switch to
24386 */
24387 ;
24388
24389 _proto.addOrChangeSourceBuffers = function addOrChangeSourceBuffers(codecs) {
24390 var _this2 = this;
24391
24392 if (!codecs || typeof codecs !== 'object' || Object.keys(codecs).length === 0) {
24393 throw new Error('Cannot addOrChangeSourceBuffers to undefined codecs');
24394 }
24395
24396 Object.keys(codecs).forEach(function (type) {
24397 var codec = codecs[type];
24398
24399 if (!_this2.hasCreatedSourceBuffers()) {
24400 return _this2.addSourceBuffer(type, codec);
24401 }
24402
24403 if (_this2.canChangeType()) {
24404 _this2.changeType(type, codec);
24405 }
24406 });
24407 }
24408 /**
24409 * Queue an update to append an ArrayBuffer.
24410 *
24411 * @param {Object} options object containing segmentInfo, type ('audio' or 'video'), and bytes
24412 * @param {Function} doneFn the function to call when done
24413 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-appendBuffer-void-ArrayBuffer-data
24414 */
24415 ;
24416
24417 _proto.appendBuffer = function appendBuffer(options, doneFn) {
24418 var _this3 = this;
24419
24420 var segmentInfo = options.segmentInfo,
24421 type = options.type,
24422 bytes = options.bytes;
24423 this.processedAppend_ = true;
24424
24425 if (type === 'audio' && this.videoBuffer && !this.videoAppendQueued_) {
24426 this.delayedAudioAppendQueue_.push([options, doneFn]);
24427 this.logger_("delayed audio append of " + bytes.length + " until video append");
24428 return;
24429 } // In the case of certain errors, for instance, QUOTA_EXCEEDED_ERR, updateend will
24430 // not be fired. This means that the queue will be blocked until the next action
24431 // taken by the segment-loader. Provide a mechanism for segment-loader to handle
24432 // these errors by calling the doneFn with the specific error.
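      // Illustrative only: a caller's doneFn might branch on that error, e.g.
      // function (error) {
      //   if (error && error.code === QUOTA_EXCEEDED_ERR) { /* trim buffer, retry */ }
      // }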
24433
24434
24435 var onError = doneFn;
24436 pushQueue({
24437 type: type,
24438 sourceUpdater: this,
24439 action: actions.appendBuffer(bytes, segmentInfo || {
24440 mediaIndex: -1
24441 }, onError),
24442 doneFn: doneFn,
24443 name: 'appendBuffer'
24444 });
24445
24446 if (type === 'video') {
24447 this.videoAppendQueued_ = true;
24448
24449 if (!this.delayedAudioAppendQueue_.length) {
24450 return;
24451 }
24452
24453 var queue = this.delayedAudioAppendQueue_.slice();
24454 this.logger_("queuing delayed audio " + queue.length + " appendBuffers");
24455 this.delayedAudioAppendQueue_.length = 0;
24456 queue.forEach(function (que) {
24457 _this3.appendBuffer.apply(_this3, que);
24458 });
24459 }
24460 }
24461 /**
24462 * Get the audio buffer's buffered timerange.
24463 *
24464 * @return {TimeRange}
24465 * The audio buffer's buffered time range
24466 */
24467 ;
24468
24469 _proto.audioBuffered = function audioBuffered() {
24470 // no media source/source buffer or it isn't in the media sources
24471 // source buffer list
24472 if (!inSourceBuffers(this.mediaSource, this.audioBuffer)) {
24473 return videojs__default['default'].createTimeRange();
24474 }
24475
24476 return this.audioBuffer.buffered ? this.audioBuffer.buffered : videojs__default['default'].createTimeRange();
24477 }
24478 /**
24479 * Get the video buffer's buffered timerange.
24480 *
24481 * @return {TimeRange}
24482 * The video buffer's buffered time range
24483 */
24484 ;
24485
24486 _proto.videoBuffered = function videoBuffered() {
24487 // no media source/source buffer or it isn't in the media sources
24488 // source buffer list
24489 if (!inSourceBuffers(this.mediaSource, this.videoBuffer)) {
24490 return videojs__default['default'].createTimeRange();
24491 }
24492
24493 return this.videoBuffer.buffered ? this.videoBuffer.buffered : videojs__default['default'].createTimeRange();
24494 }
24495 /**
24496 * Get a combined video/audio buffer's buffered timerange.
24497 *
24498 * @return {TimeRange}
24499 * the combined time range
24500 */
24501 ;
24502
24503 _proto.buffered = function buffered() {
24504 var video = inSourceBuffers(this.mediaSource, this.videoBuffer) ? this.videoBuffer : null;
24505 var audio = inSourceBuffers(this.mediaSource, this.audioBuffer) ? this.audioBuffer : null;
24506
24507 if (audio && !video) {
24508 return this.audioBuffered();
24509 }
24510
24511 if (video && !audio) {
24512 return this.videoBuffered();
24513 }
24514
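      // e.g. (illustrative) video buffered [0, 30] and audio buffered [5, 25]
      // intersect to a combined buffered range of [5, 25].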
24515 return bufferIntersection(this.audioBuffered(), this.videoBuffered());
24516 }
24517 /**
24518 * Add a callback to the queue that will set duration on the mediaSource.
24519 *
24520 * @param {number} duration
24521 * The duration to set
24522 *
24523 * @param {Function} [doneFn]
24524 * function to run after duration has been set.
24525 */
24526 ;
24527
24528 _proto.setDuration = function setDuration(duration, doneFn) {
24529 if (doneFn === void 0) {
24530 doneFn = noop;
24531 }
24532
24533 // In order to set the duration on the media source, it's necessary to wait for all
24534 // source buffers to no longer be updating. "If the updating attribute equals true on
24535 // any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
24536 // abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
24537 pushQueue({
24538 type: 'mediaSource',
24539 sourceUpdater: this,
24540 action: actions.duration(duration),
24541 name: 'duration',
24542 doneFn: doneFn
24543 });
24544 }
24545 /**
24546 * Add a mediaSource endOfStream call to the queue
24547 *
24548 * @param {Error} [error]
24549 * Call endOfStream with an error
24550 *
24551 * @param {Function} [doneFn]
24552 * A function that should be called when the
24553 * endOfStream call has finished.
24554 */
24555 ;
24556
24557 _proto.endOfStream = function endOfStream(error, doneFn) {
24558 if (error === void 0) {
24559 error = null;
24560 }
24561
24562 if (doneFn === void 0) {
24563 doneFn = noop;
24564 }
24565
24566 if (typeof error !== 'string') {
24567 error = undefined;
24568 } // In order to call endOfStream on the media source, it's necessary to wait for all
24569 // source buffers to no longer be updating. "If the updating attribute equals true on
24570 // any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
24571 // abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
24572
24573
24574 pushQueue({
24575 type: 'mediaSource',
24576 sourceUpdater: this,
24577 action: actions.endOfStream(error),
24578 name: 'endOfStream',
24579 doneFn: doneFn
24580 });
24581 }
24582 /**
24583 * Queue an update to remove a time range from the buffer.
24584 *
24585 * @param {number} start where to start the removal
24586 * @param {number} end where to end the removal
24587 * @param {Function} [done=noop] optional callback to be executed when the remove
24588 * operation is complete
24589 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
24590 */
24591 ;
24592
24593 _proto.removeAudio = function removeAudio(start, end, done) {
24594 if (done === void 0) {
24595 done = noop;
24596 }
24597
24598 if (!this.audioBuffered().length || this.audioBuffered().end(0) === 0) {
24599 done();
24600 return;
24601 }
24602
24603 pushQueue({
24604 type: 'audio',
24605 sourceUpdater: this,
24606 action: actions.remove(start, end),
24607 doneFn: done,
24608 name: 'remove'
24609 });
24610 }
24611 /**
24612 * Queue an update to remove a time range from the buffer.
24613 *
24614 * @param {number} start where to start the removal
24615 * @param {number} end where to end the removal
24616 * @param {Function} [done=noop] optional callback to be executed when the remove
24617 * operation is complete
24618 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
24619 */
24620 ;
24621
24622 _proto.removeVideo = function removeVideo(start, end, done) {
24623 if (done === void 0) {
24624 done = noop;
24625 }
24626
24627 if (!this.videoBuffered().length || this.videoBuffered().end(0) === 0) {
24628 done();
24629 return;
24630 }
24631
24632 pushQueue({
24633 type: 'video',
24634 sourceUpdater: this,
24635 action: actions.remove(start, end),
24636 doneFn: done,
24637 name: 'remove'
24638 });
24639 }
24640 /**
24641 * Whether the underlying sourceBuffer is updating or not
24642 *
24643 * @return {boolean} the updating status of the SourceBuffer
24644 */
24645 ;
24646
24647 _proto.updating = function updating() {
24648 // the audio/video source buffer is updating
24649 if (_updating('audio', this) || _updating('video', this)) {
24650 return true;
24651 }
24652
24653 return false;
24654 }
24655 /**
24656 * Set/get the timestampoffset on the audio SourceBuffer
24657 *
24658 * @return {number} the timestamp offset
24659 */
24660 ;
24661
24662 _proto.audioTimestampOffset = function audioTimestampOffset(offset) {
24663 if (typeof offset !== 'undefined' && this.audioBuffer && // no point in updating if it's the same
24664 this.audioTimestampOffset_ !== offset) {
24665 pushQueue({
24666 type: 'audio',
24667 sourceUpdater: this,
24668 action: actions.timestampOffset(offset),
24669 name: 'timestampOffset'
24670 });
24671 this.audioTimestampOffset_ = offset;
24672 }
24673
24674 return this.audioTimestampOffset_;
24675 }
24676 /**
24677 * Set/get the timestampoffset on the video SourceBuffer
24678 *
24679 * @return {number} the timestamp offset
24680 */
24681 ;
24682
24683 _proto.videoTimestampOffset = function videoTimestampOffset(offset) {
24684 if (typeof offset !== 'undefined' && this.videoBuffer && // no point in updating if it's the same
24685 this.videoTimestampOffset !== offset) {
24686 pushQueue({
24687 type: 'video',
24688 sourceUpdater: this,
24689 action: actions.timestampOffset(offset),
24690 name: 'timestampOffset'
24691 });
24692 this.videoTimestampOffset_ = offset;
24693 }
24694
24695 return this.videoTimestampOffset_;
24696 }
24697 /**
24698 * Add a function to the queue that will be called
24699 * when it is its turn to run in the audio queue.
24700 *
24701 * @param {Function} callback
24702 * The callback to queue.
24703 */
24704 ;
24705
24706 _proto.audioQueueCallback = function audioQueueCallback(callback) {
24707 if (!this.audioBuffer) {
24708 return;
24709 }
24710
24711 pushQueue({
24712 type: 'audio',
24713 sourceUpdater: this,
24714 action: actions.callback(callback),
24715 name: 'callback'
24716 });
24717 }
24718 /**
24719 * Add a function to the queue that will be called
24720 * when it is its turn to run in the video queue.
24721 *
24722 * @param {Function} callback
24723 * The callback to queue.
24724 */
24725 ;
24726
24727 _proto.videoQueueCallback = function videoQueueCallback(callback) {
24728 if (!this.videoBuffer) {
24729 return;
24730 }
24731
24732 pushQueue({
24733 type: 'video',
24734 sourceUpdater: this,
24735 action: actions.callback(callback),
24736 name: 'callback'
24737 });
24738 }
24739 /**
24740 * dispose of the source updater and the underlying sourceBuffer
24741 */
24742 ;
24743
24744 _proto.dispose = function dispose() {
24745 var _this4 = this;
24746
24747 this.trigger('dispose');
24748 bufferTypes.forEach(function (type) {
24749 _this4.abort(type);
24750
24751 if (_this4.canRemoveSourceBuffer()) {
24752 _this4.removeSourceBuffer(type);
24753 } else {
24754 _this4[type + "QueueCallback"](function () {
24755 return cleanupBuffer(type, _this4);
24756 });
24757 }
24758 });
24759 this.videoAppendQueued_ = false;
24760 this.delayedAudioAppendQueue_.length = 0;
24761
24762 if (this.sourceopenListener_) {
24763 this.mediaSource.removeEventListener('sourceopen', this.sourceopenListener_);
24764 }
24765
24766 this.off();
24767 };
24768
24769 return SourceUpdater;
24770 }(videojs__default['default'].EventTarget);
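  // Illustrative usage sketch (comments only, not executed; the media source,
  // codec strings, and byte arrays below are hypothetical):
  //
  // var updater = new SourceUpdater(new window.MediaSource());
  // updater.createSourceBuffers({ video: 'avc1.4d401f', audio: 'mp4a.40.2' });
  // updater.initializedEme(); // marks EME ready so updater.ready() can be true
  // updater.appendBuffer({ type: 'video', bytes: videoBytes }, function (error) {
  //   // error is only set for failed appends, e.g. QUOTA_EXCEEDED_ERR
  // });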
24771
24772 var uint8ToUtf8 = function uint8ToUtf8(uintArray) {
24773 return decodeURIComponent(escape(String.fromCharCode.apply(null, uintArray)));
24774 };
24775
24776 var VTT_LINE_TERMINATORS = new Uint8Array('\n\n'.split('').map(function (char) {
24777 return char.charCodeAt(0);
24778 }));
24779 /**
24780 * An object that manages segment loading and appending.
24781 *
24782 * @class VTTSegmentLoader
24783 * @param {Object} options required and optional options
24784 * @extends videojs.EventTarget
24785 */
24786
24787 var VTTSegmentLoader = /*#__PURE__*/function (_SegmentLoader) {
24788 inheritsLoose(VTTSegmentLoader, _SegmentLoader);
24789
24790 function VTTSegmentLoader(settings, options) {
24791 var _this;
24792
24793 if (options === void 0) {
24794 options = {};
24795 }
24796
24797 _this = _SegmentLoader.call(this, settings, options) || this; // VTT can't handle partial data
24798
24799 _this.handlePartialData_ = false; // SegmentLoader requires a MediaSource be specified or it will throw an error;
24800 // however, VTTSegmentLoader has no need of a media source, so delete the reference
24801
24802 _this.mediaSource_ = null;
24803 _this.subtitlesTrack_ = null;
24804 _this.loaderType_ = 'subtitle';
24805 _this.featuresNativeTextTracks_ = settings.featuresNativeTextTracks; // The VTT segment will have its own time mappings. Saving VTT segment timing info in
24806 // the sync controller leads to improper behavior.
24807
24808 _this.shouldSaveSegmentTimingInfo_ = false;
24809 return _this;
24810 }
24811
24812 var _proto = VTTSegmentLoader.prototype;
24813
24814 _proto.createTransmuxer_ = function createTransmuxer_() {
24815 // don't need to transmux any subtitles
24816 return null;
24817 }
24818 /**
24819 * Indicates which time ranges are buffered
24820 *
24821 * @return {TimeRange}
24822 * TimeRange object representing the current buffered ranges
24823 */
24824 ;
24825
24826 _proto.buffered_ = function buffered_() {
24827 if (!this.subtitlesTrack_ || !this.subtitlesTrack_.cues || !this.subtitlesTrack_.cues.length) {
24828 return videojs__default['default'].createTimeRanges();
24829 }
24830
24831 var cues = this.subtitlesTrack_.cues;
24832 var start = cues[0].startTime;
24833 var end = cues[cues.length - 1].startTime;
24834 return videojs__default['default'].createTimeRanges([[start, end]]);
24835 }
24836 /**
24837 * Gets and sets init segment for the provided map
24838 *
24839 * @param {Object} map
24840 * The map object representing the init segment to get or set
24841 * @param {boolean=} set
24842 * If true, the init segment for the provided map should be saved
24843 * @return {Object}
24844 * map object for desired init segment
24845 */
24846 ;
24847
24848 _proto.initSegmentForMap = function initSegmentForMap(map, set) {
24849 if (set === void 0) {
24850 set = false;
24851 }
24852
24853 if (!map) {
24854 return null;
24855 }
24856
24857 var id = initSegmentId(map);
24858 var storedMap = this.initSegments_[id];
24859
24860 if (set && !storedMap && map.bytes) {
24861 // append WebVTT line terminators to the media initialization segment if it exists
24862 // to follow the WebVTT spec (https://w3c.github.io/webvtt/#file-structure) that
24863 // requires two or more WebVTT line terminators between the WebVTT header and the
24864 // rest of the file
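        // (illustrative: the stored bytes become map.bytes followed by '\n\n', e.g.
        // a 'WEBVTT...' header plus terminators, so cue payloads can follow directly)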
24865 var combinedByteLength = VTT_LINE_TERMINATORS.byteLength + map.bytes.byteLength;
24866 var combinedSegment = new Uint8Array(combinedByteLength);
24867 combinedSegment.set(map.bytes);
24868 combinedSegment.set(VTT_LINE_TERMINATORS, map.bytes.byteLength);
24869 this.initSegments_[id] = storedMap = {
24870 resolvedUri: map.resolvedUri,
24871 byterange: map.byterange,
24872 bytes: combinedSegment
24873 };
24874 }
24875
24876 return storedMap || map;
24877 }
24878 /**
24879 * Returns true if all configuration required for loading is present, otherwise false.
24880 *
24881 * @return {boolean} True if all configuration required for loading is present
24882 * @private
24883 */
24884 ;
24885
24886 _proto.couldBeginLoading_ = function couldBeginLoading_() {
24887 return this.playlist_ && this.subtitlesTrack_ && !this.paused();
24888 }
24889 /**
24890 * Once all the starting parameters have been specified, begin
24891 * operation. This method should only be invoked from the INIT
24892 * state.
24893 *
24894 * @private
24895 */
24896 ;
24897
24898 _proto.init_ = function init_() {
24899 this.state = 'READY';
24900 this.resetEverything();
24901 return this.monitorBuffer_();
24902 }
24903 /**
24904 * Set a subtitle track on the segment loader to add subtitles to
24905 *
24906 * @param {TextTrack=} track
24907 * The text track to add loaded subtitles to
24908 * @return {TextTrack}
24909 * Returns the subtitles track
24910 */
24911 ;
24912
24913 _proto.track = function track(_track) {
24914 if (typeof _track === 'undefined') {
24915 return this.subtitlesTrack_;
24916 }
24917
24918 this.subtitlesTrack_ = _track; // if we were unpaused but waiting for a sourceUpdater, start
24919 // buffering now
24920
24921 if (this.state === 'INIT' && this.couldBeginLoading_()) {
24922 this.init_();
24923 }
24924
24925 return this.subtitlesTrack_;
24926 }
24927 /**
24928 * Remove any data in the source buffer between start and end times
24929 *
24930 * @param {number} start - the start time of the region to remove from the buffer
24931 * @param {number} end - the end time of the region to remove from the buffer
24932 */
24933 ;
24934
24935 _proto.remove = function remove(start, end) {
24936 removeCuesFromTrack(start, end, this.subtitlesTrack_);
24937 }
24938 /**
24939 * fill the buffer with segments unless the sourceBuffers are
24940 * currently updating
24941 *
24942 * Note: this function should only ever be called by monitorBuffer_
24943 * and never directly
24944 *
24945 * @private
24946 */
24947 ;
24948
24949 _proto.fillBuffer_ = function fillBuffer_() {
24950 var _this2 = this;
24951
24952 if (!this.syncPoint_) {
24953 this.syncPoint_ = this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_());
24954 } // see if we need to begin loading immediately
24955
24956
24957 var segmentInfo = this.checkBuffer_(this.buffered_(), this.playlist_, this.mediaIndex, this.hasPlayed_(), this.currentTime_(), this.syncPoint_);
24958 segmentInfo = this.skipEmptySegments_(segmentInfo);
24959
24960 if (!segmentInfo) {
24961 return;
24962 }
24963
24964 if (this.syncController_.timestampOffsetForTimeline(segmentInfo.timeline) === null) {
24965 // We don't have the timestamp offset that we need to sync subtitles.
24966 // Rerun on a timestamp offset or user interaction.
24967 var checkTimestampOffset = function checkTimestampOffset() {
24968 _this2.state = 'READY';
24969
24970 if (!_this2.paused()) {
24971 // if not paused, queue a buffer check as soon as possible
24972 _this2.monitorBuffer_();
24973 }
24974 };
24975
24976 this.syncController_.one('timestampoffset', checkTimestampOffset);
24977 this.state = 'WAITING_ON_TIMELINE';
24978 return;
24979 }
24980
24981 this.loadSegment_(segmentInfo);
24982 }
24983 /**
24984 * Prevents the segment loader from requesting segments we know contain no subtitles
24985 * by walking forward until we find the next segment that is not known to be
24986 * empty.
24987 *
24988 * @param {Object} segmentInfo
24989 * a segment info object that describes the current segment
24990 * @return {Object}
24991 * a segment info object that describes the current segment
24992 */
24993 ;
24994
24995 _proto.skipEmptySegments_ = function skipEmptySegments_(segmentInfo) {
24996 while (segmentInfo && segmentInfo.segment.empty) {
24997 segmentInfo = this.generateSegmentInfo_(segmentInfo.playlist, segmentInfo.mediaIndex + 1, segmentInfo.startOfSegment + segmentInfo.duration, segmentInfo.isSyncRequest);
24998 }
24999
25000 return segmentInfo;
25001 };
25002
25003 _proto.stopForError = function stopForError(error) {
25004 this.error(error);
25005 this.state = 'READY';
25006 this.pause();
25007 this.trigger('error');
25008 }
25009 /**
25010 * append a decrypted segment to the SourceBuffer through a SourceUpdater
25011 *
25012 * @private
25013 */
25014 ;
25015
25016 _proto.segmentRequestFinished_ = function segmentRequestFinished_(error, simpleSegment, result) {
25017 var _this3 = this;
25018
25019 if (!this.subtitlesTrack_) {
25020 this.state = 'READY';
25021 return;
25022 }
25023
25024 this.saveTransferStats_(simpleSegment.stats); // the request was aborted
25025
25026 if (!this.pendingSegment_) {
25027 this.state = 'READY';
25028 this.mediaRequestsAborted += 1;
25029 return;
25030 }
25031
25032 if (error) {
25033 if (error.code === REQUEST_ERRORS.TIMEOUT) {
25034 this.handleTimeout_();
25035 }
25036
25037 if (error.code === REQUEST_ERRORS.ABORTED) {
25038 this.mediaRequestsAborted += 1;
25039 } else {
25040 this.mediaRequestsErrored += 1;
25041 }
25042
25043 this.stopForError(error);
25044 return;
25045 }
25046
25047 var segmentInfo = this.pendingSegment_; // although the VTT segment loader bandwidth isn't really used, it's good to
25048 // maintain functionality between segment loaders
25049
25050 this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats);
25051 this.state = 'APPENDING'; // used for tests
25052
25053 this.trigger('appending');
25054 var segment = segmentInfo.segment;
25055
25056 if (segment.map) {
25057 segment.map.bytes = simpleSegment.map.bytes;
25058 }
25059
25060      segmentInfo.bytes = simpleSegment.bytes; // Make sure that vttjs has loaded, otherwise wait until it has finished loading
25061
25062 if (typeof window__default['default'].WebVTT !== 'function' && this.subtitlesTrack_ && this.subtitlesTrack_.tech_) {
25063 var loadHandler;
25064
25065 var errorHandler = function errorHandler() {
25066 _this3.subtitlesTrack_.tech_.off('vttjsloaded', loadHandler);
25067
25068 _this3.stopForError({
25069 message: 'Error loading vtt.js'
25070 });
25071
25072 return;
25073 };
25074
25075 loadHandler = function loadHandler() {
25076 _this3.subtitlesTrack_.tech_.off('vttjserror', errorHandler);
25077
25078 _this3.segmentRequestFinished_(error, simpleSegment, result);
25079 };
25080
25081 this.state = 'WAITING_ON_VTTJS';
25082 this.subtitlesTrack_.tech_.one('vttjsloaded', loadHandler);
25083 this.subtitlesTrack_.tech_.one('vttjserror', errorHandler);
25084 return;
25085 }
25086
25087 segment.requested = true;
25088
25089 try {
25090 this.parseVTTCues_(segmentInfo);
25091 } catch (e) {
25092 this.stopForError({
25093 message: e.message
25094 });
25095 return;
25096 }
25097
25098 this.updateTimeMapping_(segmentInfo, this.syncController_.timelines[segmentInfo.timeline], this.playlist_);
25099
25100 if (segmentInfo.cues.length) {
25101 segmentInfo.timingInfo = {
25102 start: segmentInfo.cues[0].startTime,
25103 end: segmentInfo.cues[segmentInfo.cues.length - 1].endTime
25104 };
25105 } else {
25106 segmentInfo.timingInfo = {
25107 start: segmentInfo.startOfSegment,
25108 end: segmentInfo.startOfSegment + segmentInfo.duration
25109 };
25110 }
25111
25112 if (segmentInfo.isSyncRequest) {
25113 this.trigger('syncinfoupdate');
25114 this.pendingSegment_ = null;
25115 this.state = 'READY';
25116 return;
25117 }
25118
25119 segmentInfo.byteLength = segmentInfo.bytes.byteLength;
25120 this.mediaSecondsLoaded += segment.duration; // Create VTTCue instances for each cue in the new segment and add them to
25121 // the subtitle track
25122
25123 segmentInfo.cues.forEach(function (cue) {
25124 _this3.subtitlesTrack_.addCue(_this3.featuresNativeTextTracks_ ? new window__default['default'].VTTCue(cue.startTime, cue.endTime, cue.text) : cue);
25125 }); // Remove any duplicate cues from the subtitle track. The WebVTT spec allows
25126 // cues to have identical time-intervals, but if the text is also identical
25127 // we can safely assume it is a duplicate that can be removed (ex. when a cue
25128 // "overlaps" VTT segments)
25129
25130 removeDuplicateCuesFromTrack(this.subtitlesTrack_);
25131 this.handleAppendsDone_();
25132 };
25133
25134 _proto.handleData_ = function handleData_() {// noop as we shouldn't be getting video/audio data captions
25135 // that we do not support here.
25136 };
25137
25138 _proto.updateTimingInfoEnd_ = function updateTimingInfoEnd_() {// noop
25139 }
25140 /**
25141 * Uses the WebVTT parser to parse the segment response
25142 *
25143 * @param {Object} segmentInfo
25144 * a segment info object that describes the current segment
25145 * @private
25146 */
25147 ;
25148
25149 _proto.parseVTTCues_ = function parseVTTCues_(segmentInfo) {
25150 var decoder;
25151 var decodeBytesToString = false;
25152
25153 if (typeof window__default['default'].TextDecoder === 'function') {
25154 decoder = new window__default['default'].TextDecoder('utf8');
25155 } else {
25156 decoder = window__default['default'].WebVTT.StringDecoder();
25157 decodeBytesToString = true;
25158 }
25159
25160 var parser = new window__default['default'].WebVTT.Parser(window__default['default'], window__default['default'].vttjs, decoder);
25161 segmentInfo.cues = [];
25162 segmentInfo.timestampmap = {
25163 MPEGTS: 0,
25164 LOCAL: 0
25165 };
25166 parser.oncue = segmentInfo.cues.push.bind(segmentInfo.cues);
25167
25168 parser.ontimestampmap = function (map) {
25169 segmentInfo.timestampmap = map;
25170 };
25171
25172 parser.onparsingerror = function (error) {
25173 videojs__default['default'].log.warn('Error encountered when parsing cues: ' + error.message);
25174 };
25175
25176 if (segmentInfo.segment.map) {
25177 var mapData = segmentInfo.segment.map.bytes;
25178
25179 if (decodeBytesToString) {
25180 mapData = uint8ToUtf8(mapData);
25181 }
25182
25183 parser.parse(mapData);
25184 }
25185
25186 var segmentData = segmentInfo.bytes;
25187
25188 if (decodeBytesToString) {
25189 segmentData = uint8ToUtf8(segmentData);
25190 }
25191
25192 parser.parse(segmentData);
25193 parser.flush();
25194 }
25195 /**
25196 * Updates the start and end times of any cues parsed by the WebVTT parser using
25197 * the information parsed from the X-TIMESTAMP-MAP header and a TS to media time mapping
25198 * from the SyncController
25199 *
25200 * @param {Object} segmentInfo
25201 * a segment info object that describes the current segment
25202 * @param {Object} mappingObj
25203 * object containing a mapping from TS to media time
25204 * @param {Object} playlist
25205 * the playlist object containing the segment
25206 * @private
25207 */
25208 ;
25209
25210 _proto.updateTimeMapping_ = function updateTimeMapping_(segmentInfo, mappingObj, playlist) {
25211 var segment = segmentInfo.segment;
25212
25213 if (!mappingObj) {
25214 // If the sync controller does not have a mapping of TS to Media Time for the
25215 // timeline, then we don't have enough information to update the cue
25216 // start/end times
25217 return;
25218 }
25219
25220 if (!segmentInfo.cues.length) {
25221 // If there are no cues, we also do not have enough information to figure out
25222 // segment timing. Mark that the segment contains no cues so we don't re-request
25223 // an empty segment.
25224 segment.empty = true;
25225 return;
25226 }
25227
25228 var timestampmap = segmentInfo.timestampmap;
25229 var diff = timestampmap.MPEGTS / clock.ONE_SECOND_IN_TS - timestampmap.LOCAL + mappingObj.mapping;
25230 segmentInfo.cues.forEach(function (cue) {
25231 // First convert cue time to TS time using the timestamp-map provided within the vtt
25232 cue.startTime += diff;
25233 cue.endTime += diff;
25234 });
25235
25236 if (!playlist.syncInfo) {
25237 var firstStart = segmentInfo.cues[0].startTime;
25238 var lastStart = segmentInfo.cues[segmentInfo.cues.length - 1].startTime;
25239 playlist.syncInfo = {
25240 mediaSequence: playlist.mediaSequence + segmentInfo.mediaIndex,
25241 time: Math.min(firstStart, lastStart - segment.duration)
25242 };
25243 }
25244 };
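  /**
   * Worked example (illustrative, not part of the original bundle) of the `diff`
   * computed in updateTimeMapping_ above, assuming clock.ONE_SECOND_IN_TS is
   * 90000 (the 90kHz MPEG-TS timescale). Given a WebVTT header of
   * `X-TIMESTAMP-MAP=MPEGTS:900000,LOCAL:00:00:00.000` and a hypothetical
   * timeline mapping of -5 seconds:
   *
   *   diff = 900000 / 90000 - 0 + (-5) = 5
   *
   * so a cue authored at 00:00:02.000 is shifted to start at 7 seconds of
   * media time.
   */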
25245
25246 return VTTSegmentLoader;
25247 }(SegmentLoader);
25248
25249 /**
25250 * @file ad-cue-tags.js
25251 */
25252 /**
25253 * Searches for an ad cue that overlaps with the given mediaTime
25254 *
25255 * @param {Object} track
25256 * the track to find the cue for
25257 *
25258 * @param {number} mediaTime
25259 * the time to find the cue at
25260 *
25261 * @return {Object|null}
25262 * the found cue or null
25263 */
25264
25265 var findAdCue = function findAdCue(track, mediaTime) {
25266 var cues = track.cues;
25267
25268 for (var i = 0; i < cues.length; i++) {
25269 var cue = cues[i];
25270
25271 if (mediaTime >= cue.adStartTime && mediaTime <= cue.adEndTime) {
25272 return cue;
25273 }
25274 }
25275
25276 return null;
25277 };
25278 var updateAdCues = function updateAdCues(media, track, offset) {
25279 if (offset === void 0) {
25280 offset = 0;
25281 }
25282
25283 if (!media.segments) {
25284 return;
25285 }
25286
25287 var mediaTime = offset;
25288 var cue;
25289
25290 for (var i = 0; i < media.segments.length; i++) {
25291 var segment = media.segments[i];
25292
25293 if (!cue) {
25294        // Since the cues will span at least the segment duration, adding a fudge
25295        // factor of half the segment duration will prevent duplicate cues from being
25296        // created when timing info is not exact (e.g. cue start time initialized
25297        // at 10.006677, but on the next call mediaTime is 10.003332)
25298 cue = findAdCue(track, mediaTime + segment.duration / 2);
25299 }
25300
25301 if (cue) {
25302 if ('cueIn' in segment) {
25303 // Found a CUE-IN so end the cue
25304 cue.endTime = mediaTime;
25305 cue.adEndTime = mediaTime;
25306 mediaTime += segment.duration;
25307 cue = null;
25308 continue;
25309 }
25310
25311 if (mediaTime < cue.endTime) {
25312 // Already processed this mediaTime for this cue
25313 mediaTime += segment.duration;
25314 continue;
25315 } // otherwise extend cue until a CUE-IN is found
25316
25317
25318 cue.endTime += segment.duration;
25319 } else {
25320 if ('cueOut' in segment) {
25321 cue = new window__default['default'].VTTCue(mediaTime, mediaTime + segment.duration, segment.cueOut);
25322 cue.adStartTime = mediaTime; // Assumes tag format to be
25323 // #EXT-X-CUE-OUT:30
25324
25325 cue.adEndTime = mediaTime + parseFloat(segment.cueOut);
25326 track.addCue(cue);
25327 }
25328
25329 if ('cueOutCont' in segment) {
25330 // Entered into the middle of an ad cue
25331          // Assumes tag format to be
25332 // #EXT-X-CUE-OUT-CONT:10/30
25333 var _segment$cueOutCont$s = segment.cueOutCont.split('/').map(parseFloat),
25334 adOffset = _segment$cueOutCont$s[0],
25335 adTotal = _segment$cueOutCont$s[1];
25336
25337 cue = new window__default['default'].VTTCue(mediaTime, mediaTime + segment.duration, '');
25338 cue.adStartTime = mediaTime - adOffset;
25339 cue.adEndTime = cue.adStartTime + adTotal;
25340 track.addCue(cue);
25341 }
25342 }
25343
25344 mediaTime += segment.duration;
25345 }
25346 };
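  /**
   * Worked example (illustrative, not part of the original bundle) of the cue
   * math in updateAdCues above. Joining a live stream mid-ad at a segment
   * tagged `#EXT-X-CUE-OUT-CONT:10/30` with mediaTime 100 gives adOffset = 10
   * and adTotal = 30, so:
   *
   *   cue.adStartTime = 100 - 10 = 90
   *   cue.adEndTime   = 90 + 30  = 120
   *
   * A fresh `#EXT-X-CUE-OUT:30` at mediaTime 100 instead opens the ad window at
   * 100 and closes it at 130, or earlier at the segment carrying `cueIn`.
   */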
25347
25348 var getSegmentIndex = function getSegmentIndex(i, playlist, currentTime) {
25349 if (currentTime === void 0) {
25350 currentTime = 0;
25351 }
25352
25353 var segments = playlist.segments;
25354 return playlist.endList || currentTime === 0 ? i : segments.length - (i + 1);
25355 };
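  /**
   * Illustrative note (not part of the original bundle): for VOD playlists
   * (`endList` is true) or when currentTime is 0, getSegmentIndex scans the
   * playlist forward; for live playlists it scans from the end, where segments
   * are nearest the live edge. E.g. with 5 segments and i = 0, a live playlist
   * yields index 5 - (0 + 1) = 4, the newest segment.
   */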
25356
25357  var syncPointStrategies = [// Strategy "VOD": Handle the VOD case where the sync-point is *always*
25358 // the equivalence display-time 0 === segment-index 0
25359 {
25360 name: 'VOD',
25361 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
25362 if (duration !== Infinity) {
25363 var syncPoint = {
25364 time: 0,
25365 segmentIndex: 0
25366 };
25367 return syncPoint;
25368 }
25369
25370 return null;
25371 }
25372  }, // Strategy "ProgramDateTime": We have a program-date-time tag in this playlist
25373 {
25374 name: 'ProgramDateTime',
25375 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
25376 if (!Object.keys(syncController.timelineToDatetimeMappings).length) {
25377 return null;
25378 }
25379
25380 var segments = playlist.segments || [];
25381 var syncPoint = null;
25382 var lastDistance = null;
25383 currentTime = currentTime || 0;
25384
25385 for (var i = 0; i < segments.length; i++) {
25386 var segmentIndex = getSegmentIndex(i, playlist, currentTime);
25387 var segment = segments[segmentIndex];
25388 var datetimeMapping = syncController.timelineToDatetimeMappings[segment.timeline];
25389
25390 if (!datetimeMapping) {
25391 continue;
25392 }
25393
25394 if (segment.dateTimeObject) {
25395 var segmentTime = segment.dateTimeObject.getTime() / 1000;
25396 var segmentStart = segmentTime + datetimeMapping;
25397 var distance = Math.abs(currentTime - segmentStart); // Once the distance begins to increase, or if distance is 0, we have passed
25398 // currentTime and can stop looking for better candidates
25399
25400 if (lastDistance !== null && (distance === 0 || lastDistance < distance)) {
25401 break;
25402 }
25403
25404 lastDistance = distance;
25405 syncPoint = {
25406 time: segmentStart,
25407 segmentIndex: segmentIndex
25408 };
25409 }
25410 }
25411
25412 return syncPoint;
25413 }
25414  }, // Strategy "Segment": We have a known time mapping for a timeline and a
25415 // segment in the current timeline with timing data
25416 {
25417 name: 'Segment',
25418 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
25419 var segments = playlist.segments || [];
25420 var syncPoint = null;
25421 var lastDistance = null;
25422 currentTime = currentTime || 0;
25423
25424 for (var i = 0; i < segments.length; i++) {
25425 var segmentIndex = getSegmentIndex(i, playlist, currentTime);
25426 var segment = segments[segmentIndex];
25427
25428 if (segment.timeline === currentTimeline && typeof segment.start !== 'undefined') {
25429 var distance = Math.abs(currentTime - segment.start); // Once the distance begins to increase, we have passed
25430 // currentTime and can stop looking for better candidates
25431
25432 if (lastDistance !== null && lastDistance < distance) {
25433 break;
25434 }
25435
25436 if (!syncPoint || lastDistance === null || lastDistance >= distance) {
25437 lastDistance = distance;
25438 syncPoint = {
25439 time: segment.start,
25440 segmentIndex: segmentIndex
25441 };
25442 }
25443 }
25444 }
25445
25446 return syncPoint;
25447 }
25448  }, // Strategy "Discontinuity": We have a discontinuity with a known
25449 // display-time
25450 {
25451 name: 'Discontinuity',
25452 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
25453 var syncPoint = null;
25454 currentTime = currentTime || 0;
25455
25456 if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
25457 var lastDistance = null;
25458
25459 for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
25460 var segmentIndex = playlist.discontinuityStarts[i];
25461 var discontinuity = playlist.discontinuitySequence + i + 1;
25462 var discontinuitySync = syncController.discontinuities[discontinuity];
25463
25464 if (discontinuitySync) {
25465 var distance = Math.abs(currentTime - discontinuitySync.time); // Once the distance begins to increase, we have passed
25466 // currentTime and can stop looking for better candidates
25467
25468 if (lastDistance !== null && lastDistance < distance) {
25469 break;
25470 }
25471
25472 if (!syncPoint || lastDistance === null || lastDistance >= distance) {
25473 lastDistance = distance;
25474 syncPoint = {
25475 time: discontinuitySync.time,
25476 segmentIndex: segmentIndex
25477 };
25478 }
25479 }
25480 }
25481 }
25482
25483 return syncPoint;
25484 }
25485  }, // Strategy "Playlist": We have a playlist with a known mapping of
25486 // segment index to display time
25487 {
25488 name: 'Playlist',
25489 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
25490 if (playlist.syncInfo) {
25491 var syncPoint = {
25492 time: playlist.syncInfo.time,
25493 segmentIndex: playlist.syncInfo.mediaSequence - playlist.mediaSequence
25494 };
25495 return syncPoint;
25496 }
25497
25498 return null;
25499 }
25500 }];
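  /**
   * Illustrative sketch (not part of the original bundle): each entry above
   * follows the contract consumed by SyncController#runStrategies_ below, a
   * name plus a run() that returns either null or a { time, segmentIndex }
   * sync-point. A hypothetical no-op strategy would look like this:
   */

  var exampleNoopStrategy = {
    name: 'ExampleNoop',
    run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
      // a real strategy derives { time, segmentIndex } from its inputs;
      // returning null tells runStrategies_ this strategy found no sync-point
      return null;
    }
  };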
25501
25502 var SyncController = /*#__PURE__*/function (_videojs$EventTarget) {
25503 inheritsLoose(SyncController, _videojs$EventTarget);
25504
25505 function SyncController(options) {
25506 var _this;
25507
25508      _this = _videojs$EventTarget.call(this) || this; // ...for syncing across variants
25509
25510 _this.timelines = [];
25511 _this.discontinuities = [];
25512 _this.timelineToDatetimeMappings = {};
25513 _this.logger_ = logger('SyncController');
25514 return _this;
25515 }
25516 /**
25517 * Find a sync-point for the playlist specified
25518 *
25519 * A sync-point is defined as a known mapping from display-time to
25520 * a segment-index in the current playlist.
25521 *
25522 * @param {Playlist} playlist
25523 * The playlist that needs a sync-point
25524 * @param {number} duration
25525     *        Duration of the MediaSource (Infinity if playing a live source)
25526 * @param {number} currentTimeline
25527 * The last timeline from which a segment was loaded
25528 * @return {Object}
25529 * A sync-point object
25530 */
25531
25532
25533 var _proto = SyncController.prototype;
25534
25535 _proto.getSyncPoint = function getSyncPoint(playlist, duration, currentTimeline, currentTime) {
25536 var syncPoints = this.runStrategies_(playlist, duration, currentTimeline, currentTime);
25537
25538 if (!syncPoints.length) {
25539 // Signal that we need to attempt to get a sync-point manually
25540 // by fetching a segment in the playlist and constructing
25541 // a sync-point from that information
25542 return null;
25543 } // Now find the sync-point that is closest to the currentTime because
25544 // that should result in the most accurate guess about which segment
25545 // to fetch
25546
25547
25548 return this.selectSyncPoint_(syncPoints, {
25549 key: 'time',
25550 value: currentTime
25551 });
25552 }
25553 /**
25554 * Calculate the amount of time that has expired off the playlist during playback
25555 *
25556 * @param {Playlist} playlist
25557 * Playlist object to calculate expired from
25558 * @param {number} duration
25559     *        Duration of the MediaSource (Infinity if playing a live source)
25560 * @return {number|null}
25561 * The amount of time that has expired off the playlist during playback. Null
25562 * if no sync-points for the playlist can be found.
25563 */
25564 ;
25565
25566 _proto.getExpiredTime = function getExpiredTime(playlist, duration) {
25567 if (!playlist || !playlist.segments) {
25568 return null;
25569 }
25570
25571 var syncPoints = this.runStrategies_(playlist, duration, playlist.discontinuitySequence, 0); // Without sync-points, there is not enough information to determine the expired time
25572
25573 if (!syncPoints.length) {
25574 return null;
25575 }
25576
25577 var syncPoint = this.selectSyncPoint_(syncPoints, {
25578 key: 'segmentIndex',
25579 value: 0
25580 }); // If the sync-point is beyond the start of the playlist, we want to subtract the
25581 // duration from index 0 to syncPoint.segmentIndex instead of adding.
25582
25583 if (syncPoint.segmentIndex > 0) {
25584 syncPoint.time *= -1;
25585 }
25586
25587 return Math.abs(syncPoint.time + sumDurations(playlist, syncPoint.segmentIndex, 0));
25588 }
25589 /**
25590 * Runs each sync-point strategy and returns a list of sync-points returned by the
25591 * strategies
25592 *
25593 * @private
25594 * @param {Playlist} playlist
25595 * The playlist that needs a sync-point
25596 * @param {number} duration
25597 * Duration of the MediaSource (Infinity if playing a live source)
25598 * @param {number} currentTimeline
25599 * The last timeline from which a segment was loaded
25600 * @return {Array}
25601 * A list of sync-point objects
25602 */
25603 ;
25604
25605 _proto.runStrategies_ = function runStrategies_(playlist, duration, currentTimeline, currentTime) {
25606      var syncPoints = []; // Try to find a sync-point by utilizing the various strategies...
25607
25608 for (var i = 0; i < syncPointStrategies.length; i++) {
25609 var strategy = syncPointStrategies[i];
25610 var syncPoint = strategy.run(this, playlist, duration, currentTimeline, currentTime);
25611
25612 if (syncPoint) {
25613 syncPoint.strategy = strategy.name;
25614 syncPoints.push({
25615 strategy: strategy.name,
25616 syncPoint: syncPoint
25617 });
25618 }
25619 }
25620
25621 return syncPoints;
25622 }
25623 /**
25624 * Selects the sync-point nearest the specified target
25625 *
25626 * @private
25627 * @param {Array} syncPoints
25628 * List of sync-points to select from
25629 * @param {Object} target
25630 * Object specifying the property and value we are targeting
25631 * @param {string} target.key
25632 * Specifies the property to target. Must be either 'time' or 'segmentIndex'
25633 * @param {number} target.value
25634 * The value to target for the specified key.
25635 * @return {Object}
25636 * The sync-point nearest the target
25637 */
25638 ;
25639
25640 _proto.selectSyncPoint_ = function selectSyncPoint_(syncPoints, target) {
25641 var bestSyncPoint = syncPoints[0].syncPoint;
25642 var bestDistance = Math.abs(syncPoints[0].syncPoint[target.key] - target.value);
25643 var bestStrategy = syncPoints[0].strategy;
25644
25645 for (var i = 1; i < syncPoints.length; i++) {
25646 var newDistance = Math.abs(syncPoints[i].syncPoint[target.key] - target.value);
25647
25648 if (newDistance < bestDistance) {
25649 bestDistance = newDistance;
25650 bestSyncPoint = syncPoints[i].syncPoint;
25651 bestStrategy = syncPoints[i].strategy;
25652 }
25653 }
25654
25655 this.logger_("syncPoint for [" + target.key + ": " + target.value + "] chosen with strategy" + (" [" + bestStrategy + "]: [time:" + bestSyncPoint.time + ",") + (" segmentIndex:" + bestSyncPoint.segmentIndex + "]"));
25656 return bestSyncPoint;
25657 }
25658 /**
25659 * Save any meta-data present on the segments when segments leave
25660 * the live window to the playlist to allow for synchronization at the
25661 * playlist level later.
25662 *
25663 * @param {Playlist} oldPlaylist - The previous active playlist
25664 * @param {Playlist} newPlaylist - The updated and most current playlist
25665 */
25666 ;
25667
25668 _proto.saveExpiredSegmentInfo = function saveExpiredSegmentInfo(oldPlaylist, newPlaylist) {
25669 var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence; // When a segment expires from the playlist and it has a start time
25670 // save that information as a possible sync-point reference in future
25671
25672 for (var i = mediaSequenceDiff - 1; i >= 0; i--) {
25673 var lastRemovedSegment = oldPlaylist.segments[i];
25674
25675 if (lastRemovedSegment && typeof lastRemovedSegment.start !== 'undefined') {
25676 newPlaylist.syncInfo = {
25677 mediaSequence: oldPlaylist.mediaSequence + i,
25678 time: lastRemovedSegment.start
25679 };
25680 this.logger_("playlist refresh sync: [time:" + newPlaylist.syncInfo.time + "," + (" mediaSequence: " + newPlaylist.syncInfo.mediaSequence + "]"));
25681 this.trigger('syncinfoupdate');
25682 break;
25683 }
25684 }
25685 }
25686 /**
25687     * Save the mapping from the playlist's ProgramDateTime to display time. This should only happen
25688 * before segments start to load.
25689 *
25690 * @param {Playlist} playlist - The currently active playlist
25691 */
25692 ;
25693
25694 _proto.setDateTimeMappingForStart = function setDateTimeMappingForStart(playlist) {
25695 // It's possible for the playlist to be updated before playback starts, meaning time
25696 // zero is not yet set. If, during these playlist refreshes, a discontinuity is
25697 // crossed, then the old time zero mapping (for the prior timeline) would be retained
25698 // unless the mappings are cleared.
25699 this.timelineToDatetimeMappings = {};
25700
25701 if (playlist.segments && playlist.segments.length && playlist.segments[0].dateTimeObject) {
25702 var firstSegment = playlist.segments[0];
25703 var playlistTimestamp = firstSegment.dateTimeObject.getTime() / 1000;
25704 this.timelineToDatetimeMappings[firstSegment.timeline] = -playlistTimestamp;
25705 }
25706 }
25707 /**
25708 * Calculates and saves timeline mappings, playlist sync info, and segment timing values
25709 * based on the latest timing information.
25710 *
25711 * @param {Object} options
25712 * Options object
25713 * @param {SegmentInfo} options.segmentInfo
25714 * The current active request information
25715 * @param {boolean} options.shouldSaveTimelineMapping
25716     *        If there's a timeline change, determines whether the timeline mapping should be
25717     *        saved to the timeline and program date time mappings.
25718 */
25719 ;
25720
25721 _proto.saveSegmentTimingInfo = function saveSegmentTimingInfo(_ref) {
25722 var segmentInfo = _ref.segmentInfo,
25723 shouldSaveTimelineMapping = _ref.shouldSaveTimelineMapping;
25724 var didCalculateSegmentTimeMapping = this.calculateSegmentTimeMapping_(segmentInfo, segmentInfo.timingInfo, shouldSaveTimelineMapping);
25725 var segment = segmentInfo.segment;
25726
25727 if (didCalculateSegmentTimeMapping) {
25728 this.saveDiscontinuitySyncInfo_(segmentInfo); // If the playlist does not have sync information yet, record that information
25729 // now with segment timing information
25730
25731 if (!segmentInfo.playlist.syncInfo) {
25732 segmentInfo.playlist.syncInfo = {
25733 mediaSequence: segmentInfo.playlist.mediaSequence + segmentInfo.mediaIndex,
25734 time: segment.start
25735 };
25736 }
25737 }
25738
25739 var dateTime = segment.dateTimeObject;
25740
25741 if (segment.discontinuity && shouldSaveTimelineMapping && dateTime) {
25742 this.timelineToDatetimeMappings[segment.timeline] = -(dateTime.getTime() / 1000);
25743 }
25744 };
25745
25746 _proto.timestampOffsetForTimeline = function timestampOffsetForTimeline(timeline) {
25747 if (typeof this.timelines[timeline] === 'undefined') {
25748 return null;
25749 }
25750
25751 return this.timelines[timeline].time;
25752 };
25753
25754 _proto.mappingForTimeline = function mappingForTimeline(timeline) {
25755 if (typeof this.timelines[timeline] === 'undefined') {
25756 return null;
25757 }
25758
25759 return this.timelines[timeline].mapping;
25760 }
25761 /**
25762 * Use the "media time" for a segment to generate a mapping to "display time" and
25763 * save that display time to the segment.
25764 *
25765 * @private
25766 * @param {SegmentInfo} segmentInfo
25767 * The current active request information
25768 * @param {Object} timingInfo
25769 * The start and end time of the current segment in "media time"
25770 * @param {boolean} shouldSaveTimelineMapping
25771 * If there's a timeline change, determines if the timeline mapping should be
25772 * saved in timelines.
25773 * @return {boolean}
25774 * Returns false if segment time mapping could not be calculated
25775 */
25776 ;
25777
25778 _proto.calculateSegmentTimeMapping_ = function calculateSegmentTimeMapping_(segmentInfo, timingInfo, shouldSaveTimelineMapping) {
25779 var segment = segmentInfo.segment;
25780 var mappingObj = this.timelines[segmentInfo.timeline];
25781
25782 if (typeof segmentInfo.timestampOffset === 'number') {
25783 mappingObj = {
25784 time: segmentInfo.startOfSegment,
25785 mapping: segmentInfo.startOfSegment - timingInfo.start
25786 };
25787
25788 if (shouldSaveTimelineMapping) {
25789 this.timelines[segmentInfo.timeline] = mappingObj;
25790 this.trigger('timestampoffset');
25791 this.logger_("time mapping for timeline " + segmentInfo.timeline + ": " + ("[time: " + mappingObj.time + "] [mapping: " + mappingObj.mapping + "]"));
25792 }
25793
25794 segment.start = segmentInfo.startOfSegment;
25795 segment.end = timingInfo.end + mappingObj.mapping;
25796 } else if (mappingObj) {
25797 segment.start = timingInfo.start + mappingObj.mapping;
25798 segment.end = timingInfo.end + mappingObj.mapping;
25799 } else {
25800 return false;
25801 }
25802
25803 return true;
25804 }
25805 /**
25806 * Each time we have discontinuity in the playlist, attempt to calculate the location
25807 * in display of the start of the discontinuity and save that. We also save an accuracy
25808 * value so that we save values with the most accuracy (closest to 0.)
25809 *
25810 * @private
25811 * @param {SegmentInfo} segmentInfo - The current active request information
25812 */
25813 ;
25814
25815 _proto.saveDiscontinuitySyncInfo_ = function saveDiscontinuitySyncInfo_(segmentInfo) {
25816 var playlist = segmentInfo.playlist;
25817      var segment = segmentInfo.segment; // If the current segment is a discontinuity, then we know exactly where
25818      // the range starts, and its accuracy is 0 (greater accuracy values
25819      // mean more approximation)
25820
25821 if (segment.discontinuity) {
25822 this.discontinuities[segment.timeline] = {
25823 time: segment.start,
25824 accuracy: 0
25825 };
25826 } else if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
25827 // Search for future discontinuities that we can provide better timing
25828 // information for and save that information for sync purposes
25829 for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
25830 var segmentIndex = playlist.discontinuityStarts[i];
25831 var discontinuity = playlist.discontinuitySequence + i + 1;
25832 var mediaIndexDiff = segmentIndex - segmentInfo.mediaIndex;
25833 var accuracy = Math.abs(mediaIndexDiff);
25834
25835 if (!this.discontinuities[discontinuity] || this.discontinuities[discontinuity].accuracy > accuracy) {
25836 var time = void 0;
25837
25838 if (mediaIndexDiff < 0) {
25839 time = segment.start - sumDurations(playlist, segmentInfo.mediaIndex, segmentIndex);
25840 } else {
25841 time = segment.end + sumDurations(playlist, segmentInfo.mediaIndex + 1, segmentIndex);
25842 }
25843
25844 this.discontinuities[discontinuity] = {
25845 time: time,
25846 accuracy: accuracy
25847 };
25848 }
25849 }
25850 }
25851 };
25852
25853 _proto.dispose = function dispose() {
25854 this.trigger('dispose');
25855 this.off();
25856 };
25857
25858 return SyncController;
25859 }(videojs__default['default'].EventTarget);
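  /**
   * Illustrative usage sketch (not part of the original bundle), assuming a
   * hypothetical `vodPlaylist` object. Because duration !== Infinity always
   * satisfies the "VOD" strategy, getSyncPoint resolves without any segment
   * timing information:
   *
   *   var syncController = new SyncController({});
   *   var syncPoint = syncController.getSyncPoint(vodPlaylist, 60, 0, 10);
   *   // => { time: 0, segmentIndex: 0, strategy: 'VOD' }
   */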
25860
25861 /**
25862 * The TimelineChangeController acts as a source for segment loaders to listen for and
25863 * keep track of latest and pending timeline changes. This is useful to ensure proper
25864 * sync, as each loader may need to make a consideration for what timeline the other
25865 * loader is on before making changes which could impact the other loader's media.
25866 *
25867 * @class TimelineChangeController
25868 * @extends videojs.EventTarget
25869 */
25870
25871 var TimelineChangeController = /*#__PURE__*/function (_videojs$EventTarget) {
25872 inheritsLoose(TimelineChangeController, _videojs$EventTarget);
25873
25874 function TimelineChangeController() {
25875 var _this;
25876
25877 _this = _videojs$EventTarget.call(this) || this;
25878 _this.pendingTimelineChanges_ = {};
25879 _this.lastTimelineChanges_ = {};
25880 return _this;
25881 }
25882
25883 var _proto = TimelineChangeController.prototype;
25884
25885 _proto.clearPendingTimelineChange = function clearPendingTimelineChange(type) {
25886 this.pendingTimelineChanges_[type] = null;
25887 this.trigger('pendingtimelinechange');
25888 };
25889
25890 _proto.pendingTimelineChange = function pendingTimelineChange(_ref) {
25891 var type = _ref.type,
25892 from = _ref.from,
25893 to = _ref.to;
25894
25895 if (typeof from === 'number' && typeof to === 'number') {
25896 this.pendingTimelineChanges_[type] = {
25897 type: type,
25898 from: from,
25899 to: to
25900 };
25901 this.trigger('pendingtimelinechange');
25902 }
25903
25904 return this.pendingTimelineChanges_[type];
25905 };
25906
25907 _proto.lastTimelineChange = function lastTimelineChange(_ref2) {
25908 var type = _ref2.type,
25909 from = _ref2.from,
25910 to = _ref2.to;
25911
25912 if (typeof from === 'number' && typeof to === 'number') {
25913 this.lastTimelineChanges_[type] = {
25914 type: type,
25915 from: from,
25916 to: to
25917 };
25918 delete this.pendingTimelineChanges_[type];
25919 this.trigger('timelinechange');
25920 }
25921
25922 return this.lastTimelineChanges_[type];
25923 };
25924
25925 _proto.dispose = function dispose() {
25926 this.trigger('dispose');
25927 this.pendingTimelineChanges_ = {};
25928 this.lastTimelineChanges_ = {};
25929 this.off();
25930 };
25931
25932 return TimelineChangeController;
25933 }(videojs__default['default'].EventTarget);
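  /**
   * Illustrative usage sketch (not part of the original bundle): a loader
   * records a pending change before crossing a discontinuity and promotes it to
   * a "last" change once the crossing completes, which also clears the pending
   * entry:
   *
   *   var timelineChangeController = new TimelineChangeController();
   *   timelineChangeController.pendingTimelineChange({ type: 'main', from: 0, to: 1 });
   *   timelineChangeController.lastTimelineChange({ type: 'main', from: 0, to: 1 });
   *   timelineChangeController.pendingTimelineChanges_['main']; // => undefined
   */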
25934
25935 /* rollup-plugin-worker-factory start for worker!/Users/gkatsevman/p/http-streaming-release/src/decrypter-worker.js */
25936 var workerCode = transform(getWorkerString(function () {
25937
25938 function createCommonjsModule(fn, basedir, module) {
25939 return module = {
25940 path: basedir,
25941 exports: {},
25942 require: function require(path, base) {
25943 return commonjsRequire(path, base === undefined || base === null ? module.path : base);
25944 }
25945 }, fn(module, module.exports), module.exports;
25946 }
25947
25948 function commonjsRequire() {
25949 throw new Error('Dynamic requires are not currently supported by @rollup/plugin-commonjs');
25950 }
25951
25952 var createClass = createCommonjsModule(function (module) {
25953 function _defineProperties(target, props) {
25954 for (var i = 0; i < props.length; i++) {
25955 var descriptor = props[i];
25956 descriptor.enumerable = descriptor.enumerable || false;
25957 descriptor.configurable = true;
25958 if ("value" in descriptor) descriptor.writable = true;
25959 Object.defineProperty(target, descriptor.key, descriptor);
25960 }
25961 }
25962
25963 function _createClass(Constructor, protoProps, staticProps) {
25964 if (protoProps) _defineProperties(Constructor.prototype, protoProps);
25965 if (staticProps) _defineProperties(Constructor, staticProps);
25966 return Constructor;
25967 }
25968
25969 module.exports = _createClass;
25970 module.exports["default"] = module.exports, module.exports.__esModule = true;
25971 });
25972 var setPrototypeOf = createCommonjsModule(function (module) {
25973 function _setPrototypeOf(o, p) {
25974 module.exports = _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) {
25975 o.__proto__ = p;
25976 return o;
25977 };
25978
25979 module.exports["default"] = module.exports, module.exports.__esModule = true;
25980 return _setPrototypeOf(o, p);
25981 }
25982
25983 module.exports = _setPrototypeOf;
25984 module.exports["default"] = module.exports, module.exports.__esModule = true;
25985 });
25986 var inheritsLoose = createCommonjsModule(function (module) {
25987 function _inheritsLoose(subClass, superClass) {
25988 subClass.prototype = Object.create(superClass.prototype);
25989 subClass.prototype.constructor = subClass;
25990 setPrototypeOf(subClass, superClass);
25991 }
25992
25993 module.exports = _inheritsLoose;
25994 module.exports["default"] = module.exports, module.exports.__esModule = true;
25995 });
25996 /**
25997 * @file stream.js
25998 */
25999
26000 /**
26001     * A lightweight readable stream implementation that handles event dispatching.
26002 *
26003 * @class Stream
26004 */
26005
26006 var Stream = /*#__PURE__*/function () {
26007 function Stream() {
26008 this.listeners = {};
26009 }
26010 /**
26011 * Add a listener for a specified event type.
26012 *
26013 * @param {string} type the event name
26014 * @param {Function} listener the callback to be invoked when an event of
26015 * the specified type occurs
26016 */
26017
26018
26019 var _proto = Stream.prototype;
26020
26021 _proto.on = function on(type, listener) {
26022 if (!this.listeners[type]) {
26023 this.listeners[type] = [];
26024 }
26025
26026 this.listeners[type].push(listener);
26027 }
26028 /**
26029 * Remove a listener for a specified event type.
26030 *
26031 * @param {string} type the event name
26032 * @param {Function} listener a function previously registered for this
26033 * type of event through `on`
26034 * @return {boolean} if we could turn it off or not
26035 */
26036 ;
26037
26038 _proto.off = function off(type, listener) {
26039 if (!this.listeners[type]) {
26040 return false;
26041 }
26042
26043 var index = this.listeners[type].indexOf(listener); // TODO: which is better?
26044 // In Video.js we slice listener functions
26045 // on trigger so that it does not mess up the order
26046 // while we loop through.
26047 //
26048 // Here we slice on off so that the loop in trigger
26049        // can continue using its old reference to loop without
26050 // messing up the order.
26051
26052 this.listeners[type] = this.listeners[type].slice(0);
26053 this.listeners[type].splice(index, 1);
26054 return index > -1;
26055 }
26056 /**
26057 * Trigger an event of the specified type on this stream. Any additional
26058 * arguments to this function are passed as parameters to event listeners.
26059 *
26060 * @param {string} type the event name
26061 */
26062 ;
26063
26064 _proto.trigger = function trigger(type) {
26065 var callbacks = this.listeners[type];
26066
26067 if (!callbacks) {
26068 return;
26069 } // Slicing the arguments on every invocation of this method
26070 // can add a significant amount of overhead. Avoid the
26071 // intermediate object creation for the common case of a
26072 // single callback argument
26073
26074
26075 if (arguments.length === 2) {
26076 var length = callbacks.length;
26077
26078 for (var i = 0; i < length; ++i) {
26079 callbacks[i].call(this, arguments[1]);
26080 }
26081 } else {
26082 var args = Array.prototype.slice.call(arguments, 1);
26083 var _length = callbacks.length;
26084
26085 for (var _i = 0; _i < _length; ++_i) {
26086 callbacks[_i].apply(this, args);
26087 }
26088 }
26089 }
26090 /**
26091 * Destroys the stream and cleans up.
26092 */
26093 ;
26094
26095 _proto.dispose = function dispose() {
26096 this.listeners = {};
26097 }
26098 /**
26099 * Forwards all `data` events on this stream to the destination stream. The
26100 * destination stream should provide a method `push` to receive the data
26101 * events as they arrive.
26102 *
26103 * @param {Stream} destination the stream that will receive all `data` events
26104 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
26105 */
26106 ;
26107
26108 _proto.pipe = function pipe(destination) {
26109 this.on('data', function (data) {
26110 destination.push(data);
26111 });
26112 };
26113
26114 return Stream;
26115 }();
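    /**
     * Illustrative usage sketch (not part of the original bundle): piping
     * `data` events from one Stream into any object that implements push():
     *
     *   var source = new Stream();
     *   var received = [];
     *   source.pipe({ push: function (data) { received.push(data); } });
     *   source.trigger('data', 1); // received is now [1]
     */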
26116 /*! @name pkcs7 @version 1.0.4 @license Apache-2.0 */
26117
26118 /**
26119 * Returns the subarray of a Uint8Array without PKCS#7 padding.
26120 *
26121 * @param padded {Uint8Array} unencrypted bytes that have been padded
26122 * @return {Uint8Array} the unpadded bytes
26123 * @see http://tools.ietf.org/html/rfc5652
26124 */
26125
26126
26127 function unpad(padded) {
26128 return padded.subarray(0, padded.byteLength - padded[padded.byteLength - 1]);
26129 }
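    /**
     * Worked example (illustrative, not part of the original bundle, using a
     * short buffer to show the arithmetic): PKCS#7 fills the padding bytes
     * with the pad length, so the last byte tells unpad how much to trim:
     *
     *   unpad(new Uint8Array([104, 105, 3, 3, 3]));
     *   // last byte is 3, so subarray(0, 5 - 3) => Uint8Array [104, 105]
     */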
26130 /*! @name aes-decrypter @version 3.1.2 @license Apache-2.0 */
26131
26132 /**
26133 * @file aes.js
26134 *
26135 * This file contains an adaptation of the AES decryption algorithm
26136     * from the Stanford Javascript Cryptography Library. That work is
26137 * covered by the following copyright and permissions notice:
26138 *
26139 * Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
26140 * All rights reserved.
26141 *
26142 * Redistribution and use in source and binary forms, with or without
26143 * modification, are permitted provided that the following conditions are
26144 * met:
26145 *
26146 * 1. Redistributions of source code must retain the above copyright
26147 * notice, this list of conditions and the following disclaimer.
26148 *
26149 * 2. Redistributions in binary form must reproduce the above
26150 * copyright notice, this list of conditions and the following
26151 * disclaimer in the documentation and/or other materials provided
26152 * with the distribution.
26153 *
26154 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
26155 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
26156 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
26157 * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE
26158 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
26159 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
26160 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
26161 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
26162 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
26163 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26164 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26165 *
26166 * The views and conclusions contained in the software and documentation
26167 * are those of the authors and should not be interpreted as representing
26168 * official policies, either expressed or implied, of the authors.
26169 */
26170
26171 /**
26172 * Expand the S-box tables.
26173 *
26174 * @private
26175 */
26176
26177
26178 var precompute = function precompute() {
26179 var tables = [[[], [], [], [], []], [[], [], [], [], []]];
26180 var encTable = tables[0];
26181 var decTable = tables[1];
26182 var sbox = encTable[4];
26183 var sboxInv = decTable[4];
26184 var i;
26185 var x;
26186 var xInv;
26187 var d = [];
26188 var th = [];
26189 var x2;
26190 var x4;
26191 var x8;
26192 var s;
26193 var tEnc;
26194 var tDec; // Compute double and third tables
26195
26196 for (i = 0; i < 256; i++) {
26197 th[(d[i] = i << 1 ^ (i >> 7) * 283) ^ i] = i;
26198 }
26199
26200 for (x = xInv = 0; !sbox[x]; x ^= x2 || 1, xInv = th[xInv] || 1) {
26201 // Compute sbox
26202 s = xInv ^ xInv << 1 ^ xInv << 2 ^ xInv << 3 ^ xInv << 4;
26203 s = s >> 8 ^ s & 255 ^ 99;
26204 sbox[x] = s;
26205 sboxInv[s] = x; // Compute MixColumns
26206
26207 x8 = d[x4 = d[x2 = d[x]]];
26208 tDec = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
26209 tEnc = d[s] * 0x101 ^ s * 0x1010100;
26210
26211 for (i = 0; i < 4; i++) {
26212 encTable[i][x] = tEnc = tEnc << 24 ^ tEnc >>> 8;
26213 decTable[i][s] = tDec = tDec << 24 ^ tDec >>> 8;
26214 }
26215 } // Compactify. Considerable speedup on Firefox.
26216
26217
26218 for (i = 0; i < 5; i++) {
26219 encTable[i] = encTable[i].slice(0);
26220 decTable[i] = decTable[i].slice(0);
26221 }
26222
26223 return tables;
26224 };
26225
26226 var aesTables = null;
26227 /**
26228 * Schedule out an AES key for both encryption and decryption. This
26229 * is a low-level class. Use a cipher mode to do bulk encryption.
26230 *
26231 * @class AES
26232 * @param key {Array} The key as an array of 4, 6 or 8 words.
26233 */
26234
26235 var AES = /*#__PURE__*/function () {
26236 function AES(key) {
26237 /**
26238 * The expanded S-box and inverse S-box tables. These will be computed
26239 * on the client so that we don't have to send them down the wire.
26240 *
26241 * There are two tables, _tables[0] is for encryption and
26242 * _tables[1] is for decryption.
26243 *
26244 * The first 4 sub-tables are the expanded S-box with MixColumns. The
26245 * last (_tables[01][4]) is the S-box itself.
26246 *
26247 * @private
26248 */
26249 // if we have yet to precompute the S-box tables
26250 // do so now
26251 if (!aesTables) {
26252 aesTables = precompute();
26253 } // then make a copy of that object for use
26254
26255
26256 this._tables = [[aesTables[0][0].slice(), aesTables[0][1].slice(), aesTables[0][2].slice(), aesTables[0][3].slice(), aesTables[0][4].slice()], [aesTables[1][0].slice(), aesTables[1][1].slice(), aesTables[1][2].slice(), aesTables[1][3].slice(), aesTables[1][4].slice()]];
26257 var i;
26258 var j;
26259 var tmp;
26260 var sbox = this._tables[0][4];
26261 var decTable = this._tables[1];
26262 var keyLen = key.length;
26263 var rcon = 1;
26264
26265 if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
26266 throw new Error('Invalid aes key size');
26267 }
26268
26269 var encKey = key.slice(0);
26270 var decKey = [];
26271 this._key = [encKey, decKey]; // schedule encryption keys
26272
26273 for (i = keyLen; i < 4 * keyLen + 28; i++) {
26274 tmp = encKey[i - 1]; // apply sbox
26275
26276 if (i % keyLen === 0 || keyLen === 8 && i % keyLen === 4) {
26277 tmp = sbox[tmp >>> 24] << 24 ^ sbox[tmp >> 16 & 255] << 16 ^ sbox[tmp >> 8 & 255] << 8 ^ sbox[tmp & 255]; // shift rows and add rcon
26278
26279 if (i % keyLen === 0) {
26280 tmp = tmp << 8 ^ tmp >>> 24 ^ rcon << 24;
26281 rcon = rcon << 1 ^ (rcon >> 7) * 283;
26282 }
26283 }
26284
26285 encKey[i] = encKey[i - keyLen] ^ tmp;
26286 } // schedule decryption keys
26287
26288
26289 for (j = 0; i; j++, i--) {
26290 tmp = encKey[j & 3 ? i : i - 4];
26291
26292 if (i <= 4 || j < 4) {
26293 decKey[j] = tmp;
26294 } else {
26295 decKey[j] = decTable[0][sbox[tmp >>> 24]] ^ decTable[1][sbox[tmp >> 16 & 255]] ^ decTable[2][sbox[tmp >> 8 & 255]] ^ decTable[3][sbox[tmp & 255]];
26296 }
26297 }
26298 }
26299 /**
26300 * Decrypt 16 bytes, specified as four 32-bit words.
26301 *
26302 * @param {number} encrypted0 the first word to decrypt
26303 * @param {number} encrypted1 the second word to decrypt
26304 * @param {number} encrypted2 the third word to decrypt
26305 * @param {number} encrypted3 the fourth word to decrypt
26306 * @param {Int32Array} out the array to write the decrypted words
26307 * into
26308 * @param {number} offset the offset into the output array to start
26309 * writing results
26310 * @return {Array} The plaintext.
26311 */
26312
26313
26314 var _proto = AES.prototype;
26315
26316 _proto.decrypt = function decrypt(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {
26317 var key = this._key[1]; // state variables a,b,c,d are loaded with pre-whitened data
26318
26319 var a = encrypted0 ^ key[0];
26320 var b = encrypted3 ^ key[1];
26321 var c = encrypted2 ^ key[2];
26322 var d = encrypted1 ^ key[3];
26323 var a2;
26324 var b2;
26325 var c2; // key.length === 2 ?
26326
26327 var nInnerRounds = key.length / 4 - 2;
26328 var i;
26329 var kIndex = 4;
26330 var table = this._tables[1]; // load up the tables
26331
26332 var table0 = table[0];
26333 var table1 = table[1];
26334 var table2 = table[2];
26335 var table3 = table[3];
26336 var sbox = table[4]; // Inner rounds. Cribbed from OpenSSL.
26337
26338 for (i = 0; i < nInnerRounds; i++) {
26339 a2 = table0[a >>> 24] ^ table1[b >> 16 & 255] ^ table2[c >> 8 & 255] ^ table3[d & 255] ^ key[kIndex];
26340 b2 = table0[b >>> 24] ^ table1[c >> 16 & 255] ^ table2[d >> 8 & 255] ^ table3[a & 255] ^ key[kIndex + 1];
26341 c2 = table0[c >>> 24] ^ table1[d >> 16 & 255] ^ table2[a >> 8 & 255] ^ table3[b & 255] ^ key[kIndex + 2];
26342 d = table0[d >>> 24] ^ table1[a >> 16 & 255] ^ table2[b >> 8 & 255] ^ table3[c & 255] ^ key[kIndex + 3];
26343 kIndex += 4;
26344 a = a2;
26345 b = b2;
26346 c = c2;
26347 } // Last round.
26348
26349
26350 for (i = 0; i < 4; i++) {
26351 out[(3 & -i) + offset] = sbox[a >>> 24] << 24 ^ sbox[b >> 16 & 255] << 16 ^ sbox[c >> 8 & 255] << 8 ^ sbox[d & 255] ^ key[kIndex++];
26352 a2 = a;
26353 a = b;
26354 b = c;
26355 c = d;
26356 d = a2;
26357 }
26358 };
26359
26360 return AES;
26361 }();
26362 /**
26363 * A wrapper around the Stream class to use setTimeout
26364     * and run stream "jobs" asynchronously
26365 *
26366 * @class AsyncStream
26367 * @extends Stream
26368 */
26369
26370
26371 var AsyncStream = /*#__PURE__*/function (_Stream) {
26372 inheritsLoose(AsyncStream, _Stream);
26373
26374 function AsyncStream() {
26375 var _this;
26376
26377 _this = _Stream.call(this, Stream) || this;
26378 _this.jobs = [];
26379 _this.delay = 1;
26380 _this.timeout_ = null;
26381 return _this;
26382 }
26383 /**
26384 * process an async job
26385 *
26386 * @private
26387 */
26388
26389
26390 var _proto = AsyncStream.prototype;
26391
26392 _proto.processJob_ = function processJob_() {
26393 this.jobs.shift()();
26394
26395 if (this.jobs.length) {
26396 this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
26397 } else {
26398 this.timeout_ = null;
26399 }
26400 }
26401 /**
26402 * push a job into the stream
26403 *
26404 * @param {Function} job the job to push into the stream
26405 */
26406 ;
26407
26408 _proto.push = function push(job) {
26409 this.jobs.push(job);
26410
26411 if (!this.timeout_) {
26412 this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
26413 }
26414 };
26415
26416 return AsyncStream;
26417 }(Stream);
26418 /**
26419 * Convert network-order (big-endian) bytes into their little-endian
26420 * representation.
26421 */
26422
26423
26424 var ntoh = function ntoh(word) {
26425 return word << 24 | (word & 0xff00) << 8 | (word & 0xff0000) >> 8 | word >>> 24;
26426 };
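    /**
     * Worked example (illustrative, not part of the original bundle) of ntoh
     * reversing the byte order of the 32-bit word 0x11223344:
     *
     *   0x11223344 << 24              => 0x44000000
     *   (0x11223344 & 0xff00) << 8    => 0x00330000
     *   (0x11223344 & 0xff0000) >> 8  => 0x00002200
     *   0x11223344 >>> 24             => 0x00000011
     *
     * OR-ed together: 0x44332211.
     */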
26427 /**
26428 * Decrypt bytes using AES-128 with CBC and PKCS#7 padding.
26429 *
26430 * @param {Uint8Array} encrypted the encrypted bytes
26431 * @param {Uint32Array} key the bytes of the decryption key
26432 * @param {Uint32Array} initVector the initialization vector (IV) to
26433 * use for the first round of CBC.
26434 * @return {Uint8Array} the decrypted bytes
26435 *
26436 * @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
26437 * @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29
26438 * @see https://tools.ietf.org/html/rfc2315
26439 */
26440
26441
26442 var decrypt = function decrypt(encrypted, key, initVector) {
26443 // word-level access to the encrypted bytes
26444 var encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2);
26445 var decipher = new AES(Array.prototype.slice.call(key)); // byte and word-level access for the decrypted output
26446
26447 var decrypted = new Uint8Array(encrypted.byteLength);
26448 var decrypted32 = new Int32Array(decrypted.buffer); // temporary variables for working with the IV, encrypted, and
26449 // decrypted data
26450
26451 var init0;
26452 var init1;
26453 var init2;
26454 var init3;
26455 var encrypted0;
26456 var encrypted1;
26457 var encrypted2;
26458 var encrypted3; // iteration variable
26459
26460 var wordIx; // pull out the words of the IV to ensure we don't modify the
26461 // passed-in reference and easier access
26462
26463 init0 = initVector[0];
26464 init1 = initVector[1];
26465 init2 = initVector[2];
26466 init3 = initVector[3]; // decrypt four word sequences, applying cipher-block chaining (CBC)
26467 // to each decrypted block
26468
26469 for (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {
26470 // convert big-endian (network order) words into little-endian
26471 // (javascript order)
26472 encrypted0 = ntoh(encrypted32[wordIx]);
26473 encrypted1 = ntoh(encrypted32[wordIx + 1]);
26474 encrypted2 = ntoh(encrypted32[wordIx + 2]);
26475 encrypted3 = ntoh(encrypted32[wordIx + 3]); // decrypt the block
26476
26477 decipher.decrypt(encrypted0, encrypted1, encrypted2, encrypted3, decrypted32, wordIx); // XOR with the IV, and restore network byte-order to obtain the
26478 // plaintext
26479
26480 decrypted32[wordIx] = ntoh(decrypted32[wordIx] ^ init0);
26481 decrypted32[wordIx + 1] = ntoh(decrypted32[wordIx + 1] ^ init1);
26482 decrypted32[wordIx + 2] = ntoh(decrypted32[wordIx + 2] ^ init2);
26483 decrypted32[wordIx + 3] = ntoh(decrypted32[wordIx + 3] ^ init3); // setup the IV for the next round
26484
26485 init0 = encrypted0;
26486 init1 = encrypted1;
26487 init2 = encrypted2;
26488 init3 = encrypted3;
26489 }
26490
26491 return decrypted;
26492 };
26493 /**
26494 * The `Decrypter` class that manages decryption of AES
26495 * data through `AsyncStream` objects and the `decrypt`
26496 * function
26497 *
26498 * @param {Uint8Array} encrypted the encrypted bytes
26499 * @param {Uint32Array} key the bytes of the decryption key
26500 * @param {Uint32Array} initVector the initialization vector (IV) to
26501 * @param {Function} done the function to run when done
26502 * @class Decrypter
26503 */
26504
26505
26506 var Decrypter = /*#__PURE__*/function () {
26507 function Decrypter(encrypted, key, initVector, done) {
26508 var step = Decrypter.STEP;
26509 var encrypted32 = new Int32Array(encrypted.buffer);
26510 var decrypted = new Uint8Array(encrypted.byteLength);
26511 var i = 0;
26512 this.asyncStream_ = new AsyncStream(); // split up the encryption job and do the individual chunks asynchronously
26513
26514 this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
26515
26516 for (i = step; i < encrypted32.length; i += step) {
26517 initVector = new Uint32Array([ntoh(encrypted32[i - 4]), ntoh(encrypted32[i - 3]), ntoh(encrypted32[i - 2]), ntoh(encrypted32[i - 1])]);
26518 this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
26519 } // invoke the done() callback when everything is finished
26520
26521
26522 this.asyncStream_.push(function () {
26523 // remove pkcs#7 padding from the decrypted bytes
26524 done(null, unpad(decrypted));
26525 });
26526 }
26527 /**
26528       * a getter for step, the maximum number of bytes to process at one time
26529       *
26530       * @return {number} the value of step (32000)
26531 */
26532
26533
26534 var _proto = Decrypter.prototype;
26535 /**
26536 * @private
26537 */
26538
26539 _proto.decryptChunk_ = function decryptChunk_(encrypted, key, initVector, decrypted) {
26540 return function () {
26541 var bytes = decrypt(encrypted, key, initVector);
26542 decrypted.set(bytes, encrypted.byteOffset);
26543 };
26544 };
26545
26546 createClass(Decrypter, null, [{
26547 key: "STEP",
26548 get: function get() {
26549 // 4 * 8000;
26550 return 32000;
26551 }
26552 }]);
26553 return Decrypter;
26554 }();
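    /**
     * Illustrative usage sketch (not part of the original bundle), assuming
     * hypothetical `encrypted` (Uint8Array), `key` and `iv` (Uint32Array)
     * values. Decryption is queued on the AsyncStream in STEP-sized chunks so
     * it never blocks the worker for long:
     *
     *   new Decrypter(encrypted, key, iv, function (err, bytes) {
     *     // `err` is always null here; `bytes` is the unpadded plaintext
     *   });
     */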
26555 /**
26556 * @file bin-utils.js
26557 */
26558
26559 /**
26560 * Creates an object for sending to a web worker modifying properties that are TypedArrays
26561     * into a new object with separated properties for the buffer, byteOffset, and byteLength.
26562 *
26563 * @param {Object} message
26564 * Object of properties and values to send to the web worker
26565 * @return {Object}
26566 * Modified message with TypedArray values expanded
26567 * @function createTransferableMessage
26568 */
26569
26570
26571 var createTransferableMessage = function createTransferableMessage(message) {
26572 var transferable = {};
26573 Object.keys(message).forEach(function (key) {
26574 var value = message[key];
26575
26576 if (ArrayBuffer.isView(value)) {
26577 transferable[key] = {
26578 bytes: value.buffer,
26579 byteOffset: value.byteOffset,
26580 byteLength: value.byteLength
26581 };
26582 } else {
26583 transferable[key] = value;
26584 }
26585 });
26586 return transferable;
26587 };
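    /**
     * Worked example (illustrative, not part of the original bundle): a typed
     * array property is flattened so its underlying ArrayBuffer can be listed
     * as a transferable in postMessage:
     *
     *   createTransferableMessage({ source: 'a', decrypted: new Uint8Array([1, 2]) });
     *   // => { source: 'a',
     *   //      decrypted: { bytes: <ArrayBuffer>, byteOffset: 0, byteLength: 2 } }
     */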
26588 /* global self */
26589
26590 /**
26591 * Our web worker interface so that things can talk to aes-decrypter
26592     * that will be running in a web worker. The scope is passed to this by
26593 * webworkify.
26594 */
26595
26596
26597 self.onmessage = function (event) {
26598 var data = event.data;
26599 var encrypted = new Uint8Array(data.encrypted.bytes, data.encrypted.byteOffset, data.encrypted.byteLength);
26600 var key = new Uint32Array(data.key.bytes, data.key.byteOffset, data.key.byteLength / 4);
26601 var iv = new Uint32Array(data.iv.bytes, data.iv.byteOffset, data.iv.byteLength / 4);
26602 /* eslint-disable no-new, handle-callback-err */
26603
26604 new Decrypter(encrypted, key, iv, function (err, bytes) {
26605 self.postMessage(createTransferableMessage({
26606 source: data.source,
26607 decrypted: bytes
26608 }), [bytes.buffer]);
26609 });
26610 /* eslint-enable */
26611 };
26612 }));
26613 var Decrypter = factory(workerCode);
26614 /* rollup-plugin-worker-factory end for worker!/Users/gkatsevman/p/http-streaming-release/src/decrypter-worker.js */
26615
26616 /**
26617 * Convert the properties of an HLS track into an audioTrackKind.
26618 *
26619 * @private
26620 */
26621
26622 var audioTrackKind_ = function audioTrackKind_(properties) {
26623 var kind = properties.default ? 'main' : 'alternative';
26624
26625 if (properties.characteristics && properties.characteristics.indexOf('public.accessibility.describes-video') >= 0) {
26626 kind = 'main-desc';
26627 }
26628
26629 return kind;
26630 };
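  /**
   * Worked example (illustrative, not part of the original bundle):
   *
   *   audioTrackKind_({ default: true });  // => 'main'
   *   audioTrackKind_({ default: false }); // => 'alternative'
   *   audioTrackKind_({
   *     default: true,
   *     characteristics: 'public.accessibility.describes-video'
   *   });                                  // => 'main-desc'
   */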
26631 /**
26632 * Pause provided segment loader and playlist loader if active
26633 *
26634 * @param {SegmentLoader} segmentLoader
26635 * SegmentLoader to pause
26636 * @param {Object} mediaType
26637 * Active media type
26638 * @function stopLoaders
26639 */
26640
26641
26642 var stopLoaders = function stopLoaders(segmentLoader, mediaType) {
26643 segmentLoader.abort();
26644 segmentLoader.pause();
26645
26646 if (mediaType && mediaType.activePlaylistLoader) {
26647 mediaType.activePlaylistLoader.pause();
26648 mediaType.activePlaylistLoader = null;
26649 }
26650 };
26651 /**
26652 * Start loading provided segment loader and playlist loader
26653 *
26654 * @param {PlaylistLoader} playlistLoader
26655 * PlaylistLoader to start loading
26656 * @param {Object} mediaType
26657 * Active media type
26658 * @function startLoaders
26659 */
26660
26661 var startLoaders = function startLoaders(playlistLoader, mediaType) {
26662 // Segment loader will be started after `loadedmetadata` or `loadedplaylist` from the
26663 // playlist loader
26664 mediaType.activePlaylistLoader = playlistLoader;
26665 playlistLoader.load();
26666 };
26667 /**
26668 * Returns a function to be called when the media group changes. It performs a
26669 * non-destructive (preserve the buffer) resync of the SegmentLoader. This is because a
26670 * change of group is merely a rendition switch of the same content at another encoding,
26671 * rather than a change of content, such as switching audio from English to Spanish.
26672 *
26673 * @param {string} type
26674 * MediaGroup type
26675 * @param {Object} settings
26676 * Object containing required information for media groups
26677 * @return {Function}
26678 * Handler for a non-destructive resync of SegmentLoader when the active media
26679 * group changes.
26680 * @function onGroupChanged
26681 */
26682
26683 var onGroupChanged = function onGroupChanged(type, settings) {
26684 return function () {
26685 var _settings$segmentLoad = settings.segmentLoaders,
26686 segmentLoader = _settings$segmentLoad[type],
26687 mainSegmentLoader = _settings$segmentLoad.main,
26688 mediaType = settings.mediaTypes[type];
26689 var activeTrack = mediaType.activeTrack();
26690 var activeGroup = mediaType.getActiveGroup();
26691 var previousActiveLoader = mediaType.activePlaylistLoader;
26692 var lastGroup = mediaType.lastGroup_; // the group did not change, do nothing
26693
26694 if (activeGroup && lastGroup && activeGroup.id === lastGroup.id) {
26695 return;
26696 }
26697
26698 mediaType.lastGroup_ = activeGroup;
26699 mediaType.lastTrack_ = activeTrack;
26700 stopLoaders(segmentLoader, mediaType);
26701
26702 if (!activeGroup || activeGroup.isMasterPlaylist) {
26703 // there is no active group, or the active group is a master playlist and won't change
26704 return;
26705 }
26706
26707 if (!activeGroup.playlistLoader) {
26708 if (previousActiveLoader) {
26709 // The previous group had a playlist loader but the new active group does not
26710 // this means we are switching from demuxed to muxed audio. In this case we want to
26711 // do a destructive reset of the main segment loader and not restart the audio
26712 // loaders.
26713 mainSegmentLoader.resetEverything();
26714 }
26715
26716 return;
26717 } // Non-destructive resync
26718
26719
26720 segmentLoader.resyncLoader();
26721 startLoaders(activeGroup.playlistLoader, mediaType);
26722 };
26723 };
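/* Illustrative sketch: onGroupChanged is a factory; the returned handler is
 * stored on mediaTypes[type] and invoked from the master playlist loader's
 * 'mediachange' handler (see setupMediaGroups below):
 *
 *   mediaTypes.AUDIO.onGroupChanged = onGroupChanged('AUDIO', settings);
 *   masterPlaylistLoader.on('mediachange', function () {
 *     mediaTypes.AUDIO.onGroupChanged(); // non-destructive resync
 *   });
 */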
26724 var onGroupChanging = function onGroupChanging(type, settings) {
26725 return function () {
26726 var segmentLoader = settings.segmentLoaders[type],
26727 mediaType = settings.mediaTypes[type];
26728 mediaType.lastGroup_ = null;
26729 segmentLoader.abort();
26730 segmentLoader.pause();
26731 };
26732 };
26733 /**
26734 * Returns a function to be called when the media track changes. It performs a
26735 * destructive reset of the SegmentLoader to ensure we start loading as close to
26736 * currentTime as possible.
26737 *
26738 * @param {string} type
26739 * MediaGroup type
26740 * @param {Object} settings
26741 * Object containing required information for media groups
26742 * @return {Function}
26743 * Handler for a destructive reset of SegmentLoader when the active media
26744 * track changes.
26745 * @function onTrackChanged
26746 */
26747
26748 var onTrackChanged = function onTrackChanged(type, settings) {
26749 return function () {
26750 var masterPlaylistLoader = settings.masterPlaylistLoader,
26751 _settings$segmentLoad2 = settings.segmentLoaders,
26752 segmentLoader = _settings$segmentLoad2[type],
26753 mainSegmentLoader = _settings$segmentLoad2.main,
26754 mediaType = settings.mediaTypes[type];
26755 var activeTrack = mediaType.activeTrack();
26756 var activeGroup = mediaType.getActiveGroup();
26757 var previousActiveLoader = mediaType.activePlaylistLoader;
26758 var lastTrack = mediaType.lastTrack_; // track did not change, do nothing
26759
26760 if (lastTrack && activeTrack && lastTrack.id === activeTrack.id) {
26761 return;
26762 }
26763
26764 mediaType.lastGroup_ = activeGroup;
26765 mediaType.lastTrack_ = activeTrack;
26766 stopLoaders(segmentLoader, mediaType);
26767
26768 if (!activeGroup) {
26769 // there is no group active so we do not want to restart loaders
26770 return;
26771 }
26772
26773 if (activeGroup.isMasterPlaylist) {
26774 // track did not change, do nothing
26775 if (!activeTrack || !lastTrack || activeTrack.id === lastTrack.id) {
26776 return;
26777 }
26778
26779 var mpc = settings.vhs.masterPlaylistController_;
26780 var newPlaylist = mpc.selectPlaylist(); // media will not change, do nothing
26781
26782 if (mpc.media() === newPlaylist) {
26783 return;
26784 }
26785
26786 mediaType.logger_("track change. Switching master audio from " + lastTrack.id + " to " + activeTrack.id);
26787 masterPlaylistLoader.pause();
26788 mainSegmentLoader.resetEverything();
26789 mpc.fastQualityChange_(newPlaylist);
26790 return;
26791 }
26792
26793 if (type === 'AUDIO') {
26794 if (!activeGroup.playlistLoader) {
26795 // when switching from demuxed audio/video to muxed audio/video (noted by no
26796 // playlist loader for the audio group), we want to do a destructive reset of the
26797 // main segment loader and not restart the audio loaders
26798 mainSegmentLoader.setAudio(true); // don't have to worry about disabling the audio of the audio segment loader since
26799 // it should be stopped
26800
26801 mainSegmentLoader.resetEverything();
26802 return;
26803 } // although the segment loader is an audio segment loader, call the setAudio
26804 // function to ensure it is prepared to re-append the init segment (or handle other
26805 // config changes)
26806
26807
26808 segmentLoader.setAudio(true);
26809 mainSegmentLoader.setAudio(false);
26810 }
26811
26812 if (previousActiveLoader === activeGroup.playlistLoader) {
26813 // Nothing has actually changed. This can happen because track change events can fire
26814 // multiple times for a "single" change. One for enabling the new active track, and
26815 // one for disabling the track that was active
26816 startLoaders(activeGroup.playlistLoader, mediaType);
26817 return;
26818 }
26819
26820 if (segmentLoader.track) {
26821 // For WebVTT, set the new text track in the segmentloader
26822 segmentLoader.track(activeTrack);
26823 } // destructive reset
26824
26825
26826 segmentLoader.resetEverything();
26827 startLoaders(activeGroup.playlistLoader, mediaType);
26828 };
26829 };
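/* Illustrative note: the contrast with onGroupChanged is deliberate. A group
 * change is the same content at another encoding, so the buffer is kept
 * (resyncLoader); a track change is different content, so buffered data is
 * dropped (resetEverything) to restart loading near currentTime. Sketch of the
 * wiring used in setupMediaGroups below:
 *
 *   tech.remoteTextTracks().addEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
 */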
26830 var onError = {
26831 /**
26832 * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
26833 * an error.
26834 *
26835 * @param {string} type
26836 * MediaGroup type
26837 * @param {Object} settings
26838 * Object containing required information for media groups
26839 * @return {Function}
26840 * Error handler. Logs warning (or error if the playlist is blacklisted) to
26841 * console and switches back to default audio track.
26842 * @function onError.AUDIO
26843 */
26844 AUDIO: function AUDIO(type, settings) {
26845 return function () {
26846 var segmentLoader = settings.segmentLoaders[type],
26847 mediaType = settings.mediaTypes[type],
26848 blacklistCurrentPlaylist = settings.blacklistCurrentPlaylist;
26849 stopLoaders(segmentLoader, mediaType); // switch back to default audio track
26850
26851 var activeTrack = mediaType.activeTrack();
26852 var activeGroup = mediaType.activeGroup();
26853 var id = (activeGroup.filter(function (group) {
26854 return group.default;
26855 })[0] || activeGroup[0]).id;
26856 var defaultTrack = mediaType.tracks[id];
26857
26858 if (activeTrack === defaultTrack) {
26859 // Default track encountered an error. All we can do now is blacklist the current
26860 // rendition and hope another will switch audio groups
26861 blacklistCurrentPlaylist({
26862 message: 'Problem encountered loading the default audio track.'
26863 });
26864 return;
26865 }
26866
26867 videojs__default['default'].log.warn('Problem encountered loading the alternate audio track. ' + 'Switching back to default.');
26868
26869 for (var trackId in mediaType.tracks) {
26870 mediaType.tracks[trackId].enabled = mediaType.tracks[trackId] === defaultTrack;
26871 }
26872
26873 mediaType.onTrackChanged();
26874 };
26875 },
26876
26877 /**
26878 * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
26879 * an error.
26880 *
26881 * @param {string} type
26882 * MediaGroup type
26883 * @param {Object} settings
26884 * Object containing required information for media groups
26885 * @return {Function}
26886 * Error handler. Logs warning to console and disables the active subtitle track
26887 * @function onError.SUBTITLES
26888 */
26889 SUBTITLES: function SUBTITLES(type, settings) {
26890 return function () {
26891 var segmentLoader = settings.segmentLoaders[type],
26892 mediaType = settings.mediaTypes[type];
26893 videojs__default['default'].log.warn('Problem encountered loading the subtitle track. ' + 'Disabling subtitle track.');
26894 stopLoaders(segmentLoader, mediaType);
26895 var track = mediaType.activeTrack();
26896
26897 if (track) {
26898 track.mode = 'disabled';
26899 }
26900
26901 mediaType.onTrackChanged();
26902 };
26903 }
26904 };
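/* Illustrative sketch: the onError factories are consumed below by registering
 * the returned handler on both the playlist loader and the segment loader for
 * a given type:
 *
 *   playlistLoader.on('error', onError[type](type, settings));
 *   segmentLoader.on('error', onError[type](type, settings));
 */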
26905 var setupListeners = {
26906 /**
26907 * Setup event listeners for audio playlist loader
26908 *
26909 * @param {string} type
26910 * MediaGroup type
26911 * @param {PlaylistLoader|null} playlistLoader
26912 * PlaylistLoader to register listeners on
26913 * @param {Object} settings
26914 * Object containing required information for media groups
26915 * @function setupListeners.AUDIO
26916 */
26917 AUDIO: function AUDIO(type, playlistLoader, settings) {
26918 if (!playlistLoader) {
26919 // no playlist loader means audio will be muxed with the video
26920 return;
26921 }
26922
26923 var tech = settings.tech,
26924 requestOptions = settings.requestOptions,
26925 segmentLoader = settings.segmentLoaders[type];
26926 playlistLoader.on('loadedmetadata', function () {
26927 var media = playlistLoader.media();
26928 segmentLoader.playlist(media, requestOptions); // if the video is already playing, or if this isn't a live video and preload
26929 // permits, start downloading segments
26930
26931 if (!tech.paused() || media.endList && tech.preload() !== 'none') {
26932 segmentLoader.load();
26933 }
26934 });
26935 playlistLoader.on('loadedplaylist', function () {
26936 segmentLoader.playlist(playlistLoader.media(), requestOptions); // If the player isn't paused, ensure that the segment loader is running
26937
26938 if (!tech.paused()) {
26939 segmentLoader.load();
26940 }
26941 });
26942 playlistLoader.on('error', onError[type](type, settings));
26943 },
26944
26945 /**
26946 * Setup event listeners for subtitle playlist loader
26947 *
26948 * @param {string} type
26949 * MediaGroup type
26950 * @param {PlaylistLoader|null} playlistLoader
26951 * PlaylistLoader to register listeners on
26952 * @param {Object} settings
26953 * Object containing required information for media groups
26954 * @function setupListeners.SUBTITLES
26955 */
26956 SUBTITLES: function SUBTITLES(type, playlistLoader, settings) {
26957 var tech = settings.tech,
26958 requestOptions = settings.requestOptions,
26959 segmentLoader = settings.segmentLoaders[type],
26960 mediaType = settings.mediaTypes[type];
26961 playlistLoader.on('loadedmetadata', function () {
26962 var media = playlistLoader.media();
26963 segmentLoader.playlist(media, requestOptions);
26964 segmentLoader.track(mediaType.activeTrack()); // if the video is already playing, or if this isn't a live video and preload
26965 // permits, start downloading segments
26966
26967 if (!tech.paused() || media.endList && tech.preload() !== 'none') {
26968 segmentLoader.load();
26969 }
26970 });
26971 playlistLoader.on('loadedplaylist', function () {
26972 segmentLoader.playlist(playlistLoader.media(), requestOptions); // If the player isn't paused, ensure that the segment loader is running
26973
26974 if (!tech.paused()) {
26975 segmentLoader.load();
26976 }
26977 });
26978 playlistLoader.on('error', onError[type](type, settings));
26979 }
26980 };
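/* Illustrative note: both setupListeners variants follow the same pattern —
 * hand every refreshed playlist to the segment loader, and only start
 * downloading when playback state and preload allow it:
 *
 *   playlistLoader.on('loadedmetadata', updatePlaylistAndMaybeLoad); // hypothetical name for
 *   playlistLoader.on('loadedplaylist', updatePlaylistAndMaybeLoad); // the inline handlers above
 *   playlistLoader.on('error', onError[type](type, settings));
 */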
26981 var initialize = {
26982 /**
26983 * Setup PlaylistLoaders and AudioTracks for the audio groups
26984 *
26985 * @param {string} type
26986 * MediaGroup type
26987 * @param {Object} settings
26988 * Object containing required information for media groups
26989 * @function initialize.AUDIO
26990 */
26991 'AUDIO': function AUDIO(type, settings) {
26992 var vhs = settings.vhs,
26993 sourceType = settings.sourceType,
26994 segmentLoader = settings.segmentLoaders[type],
26995 requestOptions = settings.requestOptions,
26996 mediaGroups = settings.master.mediaGroups,
26997 _settings$mediaTypes$ = settings.mediaTypes[type],
26998 groups = _settings$mediaTypes$.groups,
26999 tracks = _settings$mediaTypes$.tracks,
27000 logger_ = _settings$mediaTypes$.logger_,
27001 masterPlaylistLoader = settings.masterPlaylistLoader;
27002 var audioOnlyMaster = isAudioOnly(masterPlaylistLoader.master); // force a default if we have none
27003
27004 if (!mediaGroups[type] || Object.keys(mediaGroups[type]).length === 0) {
27005 mediaGroups[type] = {
27006 main: {
27007 default: {
27008 default: true
27009 }
27010 }
27011 };
27012 }
27013
27014 for (var groupId in mediaGroups[type]) {
27015 if (!groups[groupId]) {
27016 groups[groupId] = [];
27017 }
27018
27019 for (var variantLabel in mediaGroups[type][groupId]) {
27020 var properties = mediaGroups[type][groupId][variantLabel];
27021 var playlistLoader = void 0;
27022
27023 if (audioOnlyMaster) {
27024 logger_("AUDIO group '" + groupId + "' label '" + variantLabel + "' is a master playlist");
27025 properties.isMasterPlaylist = true;
27026 playlistLoader = null; // if vhs-json was provided as the source, and the media playlist was resolved,
27027 // use the resolved media playlist object
27028 } else if (sourceType === 'vhs-json' && properties.playlists) {
27029 playlistLoader = new PlaylistLoader(properties.playlists[0], vhs, requestOptions);
27030 } else if (properties.resolvedUri) {
27031 playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions);
27032 } else if (properties.playlists && sourceType === 'dash') {
27033 playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, masterPlaylistLoader);
27034 } else {
27035 // no resolvedUri means the audio is muxed with the video when using this
27036 // audio track
27037 playlistLoader = null;
27038 }
27039
27040 properties = videojs__default['default'].mergeOptions({
27041 id: variantLabel,
27042 playlistLoader: playlistLoader
27043 }, properties);
27044 setupListeners[type](type, properties.playlistLoader, settings);
27045 groups[groupId].push(properties);
27046
27047 if (typeof tracks[variantLabel] === 'undefined') {
27048 var track = new videojs__default['default'].AudioTrack({
27049 id: variantLabel,
27050 kind: audioTrackKind_(properties),
27051 enabled: false,
27052 language: properties.language,
27053 default: properties.default,
27054 label: variantLabel
27055 });
27056 tracks[variantLabel] = track;
27057 }
27058 }
27059 } // setup single error event handler for the segment loader
27060
27061
27062 segmentLoader.on('error', onError[type](type, settings));
27063 },
27064
27065 /**
27066 * Setup PlaylistLoaders and TextTracks for the subtitle groups
27067 *
27068 * @param {string} type
27069 * MediaGroup type
27070 * @param {Object} settings
27071 * Object containing required information for media groups
27072 * @function initialize.SUBTITLES
27073 */
27074 'SUBTITLES': function SUBTITLES(type, settings) {
27075 var tech = settings.tech,
27076 vhs = settings.vhs,
27077 sourceType = settings.sourceType,
27078 segmentLoader = settings.segmentLoaders[type],
27079 requestOptions = settings.requestOptions,
27080 mediaGroups = settings.master.mediaGroups,
27081 _settings$mediaTypes$2 = settings.mediaTypes[type],
27082 groups = _settings$mediaTypes$2.groups,
27083 tracks = _settings$mediaTypes$2.tracks,
27084 masterPlaylistLoader = settings.masterPlaylistLoader;
27085
27086 for (var groupId in mediaGroups[type]) {
27087 if (!groups[groupId]) {
27088 groups[groupId] = [];
27089 }
27090
27091 for (var variantLabel in mediaGroups[type][groupId]) {
27092 if (mediaGroups[type][groupId][variantLabel].forced) {
27093 // Subtitle playlists with the forced attribute are not selectable in Safari.
27094 // According to Apple's HLS Authoring Specification:
27095 // If content has forced subtitles and regular subtitles in a given language,
27096 // the regular subtitles track in that language MUST contain both the forced
27097 // subtitles and the regular subtitles for that language.
27098 // Because of this requirement, and because Safari does not add forced subtitles,
27099 // forced subtitles are skipped here to maintain consistent experience across
27100 // all platforms
27101 continue;
27102 }
27103
27104 var properties = mediaGroups[type][groupId][variantLabel];
27105 var playlistLoader = void 0;
27106
27107 if (sourceType === 'hls') {
27108 playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions);
27109 } else if (sourceType === 'dash') {
27110 var playlists = properties.playlists.filter(function (p) {
27111 return p.excludeUntil !== Infinity;
27112 });
27113
27114 if (!playlists.length) {
27115 return;
27116 }
27117
27118 playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, masterPlaylistLoader);
27119 } else if (sourceType === 'vhs-json') {
27120 playlistLoader = new PlaylistLoader( // if the vhs-json object included the media playlist, use the media playlist
27121 // as provided, otherwise use the resolved URI to load the playlist
27122 properties.playlists ? properties.playlists[0] : properties.resolvedUri, vhs, requestOptions);
27123 }
27124
27125 properties = videojs__default['default'].mergeOptions({
27126 id: variantLabel,
27127 playlistLoader: playlistLoader
27128 }, properties);
27129 setupListeners[type](type, properties.playlistLoader, settings);
27130 groups[groupId].push(properties);
27131
27132 if (typeof tracks[variantLabel] === 'undefined') {
27133 var track = tech.addRemoteTextTrack({
27134 id: variantLabel,
27135 kind: 'subtitles',
27136 default: properties.default && properties.autoselect,
27137 language: properties.language,
27138 label: variantLabel
27139 }, false).track;
27140 tracks[variantLabel] = track;
27141 }
27142 }
27143 } // setup single error event handler for the segment loader
27144
27145
27146 segmentLoader.on('error', onError[type](type, settings));
27147 },
27148
27149 /**
27150 * Setup TextTracks for the closed-caption groups
27151 *
27152 * @param {String} type
27153 * MediaGroup type
27154 * @param {Object} settings
27155 * Object containing required information for media groups
27156 * @function initialize['CLOSED-CAPTIONS']
27157 */
27158 'CLOSED-CAPTIONS': function CLOSEDCAPTIONS(type, settings) {
27159 var tech = settings.tech,
27160 mediaGroups = settings.master.mediaGroups,
27161 _settings$mediaTypes$3 = settings.mediaTypes[type],
27162 groups = _settings$mediaTypes$3.groups,
27163 tracks = _settings$mediaTypes$3.tracks;
27164
27165 for (var groupId in mediaGroups[type]) {
27166 if (!groups[groupId]) {
27167 groups[groupId] = [];
27168 }
27169
27170 for (var variantLabel in mediaGroups[type][groupId]) {
27171 var properties = mediaGroups[type][groupId][variantLabel]; // We only support CEA608 captions for now, so ignore anything that
27172 // doesn't use a CCx INSTREAM-ID
27173
27174 if (!properties.instreamId.match(/CC\d/)) {
27175 continue;
27176 } // No PlaylistLoader is required for Closed-Captions because the captions are
27177 // embedded within the video stream
27178
27179
27180 groups[groupId].push(videojs__default['default'].mergeOptions({
27181 id: variantLabel
27182 }, properties));
27183
27184 if (typeof tracks[variantLabel] === 'undefined') {
27185 var track = tech.addRemoteTextTrack({
27186 id: properties.instreamId,
27187 kind: 'captions',
27188 default: properties.default && properties.autoselect,
27189 language: properties.language,
27190 label: variantLabel
27191 }, false).track;
27192 tracks[variantLabel] = track;
27193 }
27194 }
27195 }
27196 }
27197 };
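/* Illustrative note: when a master manifest declares no AUDIO media groups,
 * initialize.AUDIO synthesizes a single default group (see the code above):
 *
 *   mediaGroups.AUDIO = { main: { default: { default: true } } };
 *
 * i.e. one group ('main') with one variant labelled 'default' and no
 * playlistLoader, meaning the audio is muxed into the video renditions.
 */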
27198 /**
27199 * Returns a function used to get the active group of the provided type
27200 *
27201 * @param {string} type
27202 * MediaGroup type
27203 * @param {Object} settings
27204 * Object containing required information for media groups
27205 * @return {Function}
27206 * Function that returns the active media group for the provided type. Takes an
27207 * optional parameter {TextTrack} track. If no track is provided, a list of all
27208 * variants in the group, otherwise the variant corresponding to the provided
27209 * track is returned.
27210 * @function activeGroup
27211 */
27212
27213 var activeGroup = function activeGroup(type, settings) {
27214 return function (track) {
27215 var masterPlaylistLoader = settings.masterPlaylistLoader,
27216 groups = settings.mediaTypes[type].groups;
27217 var media = masterPlaylistLoader.media();
27218
27219 if (!media) {
27220 return null;
27221 }
27222
27223 var variants = null; // set variants to the main media active group
27224
27225 if (media.attributes[type]) {
27226 variants = groups[media.attributes[type]];
27227 }
27228
27229 var groupKeys = Object.keys(groups);
27230
27231 if (!variants) {
27232 // use the main group if it exists
27233 if (groups.main) {
27234 variants = groups.main; // only one group, use that one
27235 } else if (groupKeys.length === 1) {
27236 variants = groups[groupKeys[0]];
27237 }
27238 }
27239
27240 if (typeof track === 'undefined') {
27241 return variants;
27242 }
27243
27244 if (track === null || !variants) {
27245 // An active track was specified so a corresponding group is expected. track === null
27246 // means no track is currently active so there is no corresponding group
27247 return null;
27248 }
27249
27250 return variants.filter(function (props) {
27251 return props.id === track.id;
27252 })[0] || null;
27253 };
27254 };
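/* Illustrative sketch: activeGroup returns a getter with two calling
 * conventions (assuming `track` is a track object with an `id`):
 *
 *   var getGroup = activeGroup('AUDIO', settings);
 *   getGroup();      // -> all variants in the active group, or null before a media is selected
 *   getGroup(track); // -> the single variant whose id matches track.id, or null
 */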
27255 var activeTrack = {
27256 /**
27257 * Returns a function used to get the active track of type provided
27258 *
27259 * @param {string} type
27260 * MediaGroup type
27261 * @param {Object} settings
27262 * Object containing required information for media groups
27263 * @return {Function}
27264 * Function that returns the active media track for the provided type. Returns
27265 * null if no track is active
27266 * @function activeTrack.AUDIO
27267 */
27268 AUDIO: function AUDIO(type, settings) {
27269 return function () {
27270 var tracks = settings.mediaTypes[type].tracks;
27271
27272 for (var id in tracks) {
27273 if (tracks[id].enabled) {
27274 return tracks[id];
27275 }
27276 }
27277
27278 return null;
27279 };
27280 },
27281
27282 /**
27283 * Returns a function used to get the active track of type provided
27284 *
27285 * @param {string} type
27286 * MediaGroup type
27287 * @param {Object} settings
27288 * Object containing required information for media groups
27289 * @return {Function}
27290 * Function that returns the active media track for the provided type. Returns
27291 * null if no track is active
27292 * @function activeTrack.SUBTITLES
27293 */
27294 SUBTITLES: function SUBTITLES(type, settings) {
27295 return function () {
27296 var tracks = settings.mediaTypes[type].tracks;
27297
27298 for (var id in tracks) {
27299 if (tracks[id].mode === 'showing' || tracks[id].mode === 'hidden') {
27300 return tracks[id];
27301 }
27302 }
27303
27304 return null;
27305 };
27306 }
27307 };
27308 var getActiveGroup = function getActiveGroup(type, _ref) {
27309 var mediaTypes = _ref.mediaTypes;
27310 return function () {
27311 var activeTrack_ = mediaTypes[type].activeTrack();
27312
27313 if (!activeTrack_) {
27314 return null;
27315 }
27316
27317 return mediaTypes[type].activeGroup(activeTrack_);
27318 };
27319 };
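/* Illustrative note: the two activeTrack getters differ only in how "active" is
 * detected — AudioTracks expose an `enabled` flag, while text tracks are active
 * when `mode` is 'showing' or 'hidden'. getActiveGroup simply composes them:
 *
 *   var group = mediaTypes.SUBTITLES.activeGroup(mediaTypes.SUBTITLES.activeTrack());
 */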
27320 /**
27321 * Setup PlaylistLoaders and Tracks for media groups (Audio, Subtitles,
27322 * Closed-Captions) specified in the master manifest.
27323 *
27324 * @param {Object} settings
27325 * Object containing required information for setting up the media groups
27326 * @param {Tech} settings.tech
27327 * The tech of the player
27328 * @param {Object} settings.requestOptions
27329 * XHR request options used by the segment loaders
27330 * @param {PlaylistLoader} settings.masterPlaylistLoader
27331 * PlaylistLoader for the master source
27332 * @param {VhsHandler} settings.vhs
27333 * VHS SourceHandler
27334 * @param {Object} settings.master
27335 * The parsed master manifest
27336 * @param {Object} settings.mediaTypes
27337 * Object to store the loaders, tracks, and utility methods for each media type
27338 * @param {Function} settings.blacklistCurrentPlaylist
27339 * Blacklists the current rendition and forces a rendition switch.
27340 * @function setupMediaGroups
27341 */
27342
27343 var setupMediaGroups = function setupMediaGroups(settings) {
27344 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
27345 initialize[type](type, settings);
27346 });
27347 var mediaTypes = settings.mediaTypes,
27348 masterPlaylistLoader = settings.masterPlaylistLoader,
27349 tech = settings.tech,
27350 vhs = settings.vhs; // setup active group and track getters and change event handlers
27351
27352 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
27353 mediaTypes[type].activeGroup = activeGroup(type, settings);
27354 mediaTypes[type].activeTrack = activeTrack[type](type, settings);
27355 mediaTypes[type].onGroupChanged = onGroupChanged(type, settings);
27356 mediaTypes[type].onGroupChanging = onGroupChanging(type, settings);
27357 mediaTypes[type].onTrackChanged = onTrackChanged(type, settings);
27358 mediaTypes[type].getActiveGroup = getActiveGroup(type, settings);
27359 }); // DO NOT enable the default subtitle or caption track.
27360 // DO enable the default audio track
27361
27362 var audioGroup = mediaTypes.AUDIO.activeGroup();
27363
27364 if (audioGroup) {
27365 var groupId = (audioGroup.filter(function (group) {
27366 return group.default;
27367 })[0] || audioGroup[0]).id;
27368 mediaTypes.AUDIO.tracks[groupId].enabled = true;
27369 mediaTypes.AUDIO.onGroupChanged();
27370 mediaTypes.AUDIO.onTrackChanged();
27371 }
27372
27373 masterPlaylistLoader.on('mediachange', function () {
27374 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
27375 return mediaTypes[type].onGroupChanged();
27376 });
27377 });
27378 masterPlaylistLoader.on('mediachanging', function () {
27379 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
27380 return mediaTypes[type].onGroupChanging();
27381 });
27382 }); // custom audio track change event handler for usage event
27383
27384 var onAudioTrackChanged = function onAudioTrackChanged() {
27385 mediaTypes.AUDIO.onTrackChanged();
27386 tech.trigger({
27387 type: 'usage',
27388 name: 'vhs-audio-change'
27389 });
27390 tech.trigger({
27391 type: 'usage',
27392 name: 'hls-audio-change'
27393 });
27394 };
27395
27396 tech.audioTracks().addEventListener('change', onAudioTrackChanged);
27397 tech.remoteTextTracks().addEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
27398 vhs.on('dispose', function () {
27399 tech.audioTracks().removeEventListener('change', onAudioTrackChanged);
27400 tech.remoteTextTracks().removeEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
27401 }); // clear existing audio tracks and add the ones we just created
27402
27403 tech.clearTracks('audio');
27404
27405 for (var id in mediaTypes.AUDIO.tracks) {
27406 tech.audioTracks().addTrack(mediaTypes.AUDIO.tracks[id]);
27407 }
27408 };
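/* Illustrative sketch of the settings object setupMediaGroups expects (key
 * names match the destructuring in the helpers above; values are placeholders):
 *
 *   setupMediaGroups({
 *     sourceType: 'hls',
 *     segmentLoaders: { AUDIO: audioLoader, SUBTITLES: vttLoader, main: mainLoader },
 *     tech: tech,
 *     requestOptions: { withCredentials: false, timeout: null },
 *     masterPlaylistLoader: masterPlaylistLoader,
 *     vhs: vhs,
 *     master: parsedMaster,
 *     mediaTypes: createMediaTypes(),
 *     blacklistCurrentPlaylist: blacklistCurrentPlaylist
 *   });
 */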
27409 /**
27410 * Creates skeleton object used to store the loaders, tracks, and utility methods for each
27411 * media type
27412 *
27413 * @return {Object}
27414 * Object to store the loaders, tracks, and utility methods for each media type
27415 * @function createMediaTypes
27416 */
27417
27418 var createMediaTypes = function createMediaTypes() {
27419 var mediaTypes = {};
27420 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
27421 mediaTypes[type] = {
27422 groups: {},
27423 tracks: {},
27424 activePlaylistLoader: null,
27425 activeGroup: noop,
27426 activeTrack: noop,
27427 getActiveGroup: noop,
27428 onGroupChanged: noop,
27429 onTrackChanged: noop,
27430 lastTrack_: null,
27431 logger_: logger("MediaGroups[" + type + "]")
27432 };
27433 });
27434 return mediaTypes;
27435 };
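/* Illustrative note: the skeleton produced for each of AUDIO, SUBTITLES and
 * CLOSED-CAPTIONS looks like this (the noop placeholders are replaced with
 * real implementations in setupMediaGroups):
 *
 *   {
 *     groups: {},                // groupId -> array of variant properties
 *     tracks: {},                // variant label -> AudioTrack / TextTrack
 *     activePlaylistLoader: null,
 *     activeGroup: noop,
 *     activeTrack: noop,
 *     getActiveGroup: noop,
 *     onGroupChanged: noop,
 *     onTrackChanged: noop,
 *     lastTrack_: null,
 *     logger_: logger('MediaGroups[AUDIO]') // per-type logger
 *   }
 */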
27436
27437 var ABORT_EARLY_BLACKLIST_SECONDS = 60 * 2;
27438 var Vhs$1; // SegmentLoader stats that need to have each loader's
27439 // values summed to calculate the final value
27440
27441 var loaderStats = ['mediaRequests', 'mediaRequestsAborted', 'mediaRequestsTimedout', 'mediaRequestsErrored', 'mediaTransferDuration', 'mediaBytesTransferred'];
27442
27443 var sumLoaderStat = function sumLoaderStat(stat) {
27444 return this.audioSegmentLoader_[stat] + this.mainSegmentLoader_[stat];
27445 };
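/* Illustrative note: sumLoaderStat relies on `this` being the
 * MasterPlaylistController; the constructor binds it once per stat (see the
 * loaderStats.forEach below), producing getters such as:
 *
 *   mpc.mediaBytesTransferred_(); // audioSegmentLoader_ value + mainSegmentLoader_ value
 */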
27446
27447 var shouldSwitchToMedia = function shouldSwitchToMedia(_ref) {
27448 var currentPlaylist = _ref.currentPlaylist,
27449 nextPlaylist = _ref.nextPlaylist,
27450 forwardBuffer = _ref.forwardBuffer,
27451 bufferLowWaterLine = _ref.bufferLowWaterLine,
27452 bufferHighWaterLine = _ref.bufferHighWaterLine,
27453 duration = _ref.duration,
27454 experimentalBufferBasedABR = _ref.experimentalBufferBasedABR,
27455 log = _ref.log;
27456
27457 // we have no other playlist to switch to
27458 if (!nextPlaylist) {
27459 videojs__default['default'].log.warn('We received no playlist to switch to. Please check your stream.');
27460 return false;
27461 }
27462
27463 var sharedLogLine = "allowing switch " + (currentPlaylist && currentPlaylist.id || 'null') + " -> " + nextPlaylist.id; // If the playlist is live, we don't want to take the low water line into account.
27464 // This is because in LIVE, the player plays 3 segments from the end of the
27465 // playlist, and if `BUFFER_LOW_WATER_LINE` is greater than the duration available
27466 // in those segments, a viewer will never experience a rendition upswitch.
27467
27468 if (!currentPlaylist || !currentPlaylist.endList) {
27469 log(sharedLogLine + " as current playlist " + (!currentPlaylist ? 'is not set' : 'is live'));
27470 return true;
27471 } // no need to switch, the playlist is the same
27472
27473
27474 if (nextPlaylist.id === currentPlaylist.id) {
27475 return false;
27476 }
27477
27478 var maxBufferLowWaterLine = experimentalBufferBasedABR ? Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE : Config.MAX_BUFFER_LOW_WATER_LINE; // For the same reason as LIVE, we ignore the low water line when the VOD
27479 // duration is below the max potential low water line
27480
27481 if (duration < maxBufferLowWaterLine) {
27482 log(sharedLogLine + " as duration < max low water line (" + duration + " < " + maxBufferLowWaterLine + ")");
27483 return true;
27484 }
27485
27486 var nextBandwidth = nextPlaylist.attributes.BANDWIDTH;
27487 var currBandwidth = currentPlaylist.attributes.BANDWIDTH; // when switching down, if our buffer is lower than the high water line,
27488 // we can switch down
27489
27490 if (nextBandwidth < currBandwidth && (!experimentalBufferBasedABR || forwardBuffer < bufferHighWaterLine)) {
27491 var logLine = sharedLogLine + " as next bandwidth < current bandwidth (" + nextBandwidth + " < " + currBandwidth + ")";
27492
27493 if (experimentalBufferBasedABR) {
27494 logLine += " and forwardBuffer < bufferHighWaterLine (" + forwardBuffer + " < " + bufferHighWaterLine + ")";
27495 }
27496
27497 log(logLine);
27498 return true;
27499 } // and if our buffer is higher than the low water line,
27500 // we can switch up
27501
27502
27503 if ((!experimentalBufferBasedABR || nextBandwidth > currBandwidth) && forwardBuffer >= bufferLowWaterLine) {
27504 var _logLine = sharedLogLine + " as forwardBuffer >= bufferLowWaterLine (" + forwardBuffer + " >= " + bufferLowWaterLine + ")";
27505
27506 if (experimentalBufferBasedABR) {
27507 _logLine += " and next bandwidth > current bandwidth (" + nextBandwidth + " > " + currBandwidth + ")";
27508 }
27509
27510 log(_logLine);
27511 return true;
27512 }
27513
27514 log("not " + sharedLogLine + " as no switching criteria met");
27515 return false;
27516 };
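/* Illustrative sketch of the decision logic above with the default ABR path
 * (hypothetical playlists; water lines are passed in by the caller):
 *
 *   shouldSwitchToMedia({
 *     currentPlaylist: { id: 'vod-720p', endList: true, attributes: { BANDWIDTH: 3e6 } },
 *     nextPlaylist: { id: 'vod-1080p', attributes: { BANDWIDTH: 5e6 } },
 *     forwardBuffer: 35,
 *     bufferLowWaterLine: 30,
 *     bufferHighWaterLine: 30,
 *     duration: 600,
 *     experimentalBufferBasedABR: false,
 *     log: console.log.bind(console)
 *   }); // => true, since forwardBuffer (35) >= bufferLowWaterLine (30)
 */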
27517 /**
27518 * the master playlist controller controls all interactions
27519 * between playlists and segment loaders. At this time this mainly
27520 * involves a master playlist and a series of audio playlists
27521 * if they are available
27522 *
27523 * @class MasterPlaylistController
27524 * @extends videojs.EventTarget
27525 */
27526
27527
27528 var MasterPlaylistController = /*#__PURE__*/function (_videojs$EventTarget) {
27529 inheritsLoose(MasterPlaylistController, _videojs$EventTarget);
27530
27531 function MasterPlaylistController(options) {
27532 var _this;
27533
27534 _this = _videojs$EventTarget.call(this) || this;
27535 var src = options.src,
27536 handleManifestRedirects = options.handleManifestRedirects,
27537 withCredentials = options.withCredentials,
27538 tech = options.tech,
27539 bandwidth = options.bandwidth,
27540 externVhs = options.externVhs,
27541 useCueTags = options.useCueTags,
27542 blacklistDuration = options.blacklistDuration,
27543 enableLowInitialPlaylist = options.enableLowInitialPlaylist,
27544 sourceType = options.sourceType,
27545 cacheEncryptionKeys = options.cacheEncryptionKeys,
27546 handlePartialData = options.handlePartialData,
27547 experimentalBufferBasedABR = options.experimentalBufferBasedABR;
27548
27549 if (!src) {
27550 throw new Error('A non-empty playlist URL or JSON manifest string is required');
27551 }
27552
27553 Vhs$1 = externVhs;
27554 _this.experimentalBufferBasedABR = Boolean(experimentalBufferBasedABR);
27555 _this.withCredentials = withCredentials;
27556 _this.tech_ = tech;
27557 _this.vhs_ = tech.vhs;
27558 _this.sourceType_ = sourceType;
27559 _this.useCueTags_ = useCueTags;
27560 _this.blacklistDuration = blacklistDuration;
27561 _this.enableLowInitialPlaylist = enableLowInitialPlaylist;
27562
27563 if (_this.useCueTags_) {
27564 _this.cueTagsTrack_ = _this.tech_.addTextTrack('metadata', 'ad-cues');
27565 _this.cueTagsTrack_.inBandMetadataTrackDispatchType = '';
27566 }
27567
27568 _this.requestOptions_ = {
27569 withCredentials: withCredentials,
27570 handleManifestRedirects: handleManifestRedirects,
27571 timeout: null
27572 };
27573
27574 _this.on('error', _this.pauseLoading);
27575
27576 _this.mediaTypes_ = createMediaTypes();
27577 _this.mediaSource = new window__default['default'].MediaSource();
27578 _this.handleDurationChange_ = _this.handleDurationChange_.bind(assertThisInitialized(_this));
27579 _this.handleSourceOpen_ = _this.handleSourceOpen_.bind(assertThisInitialized(_this));
27580 _this.handleSourceEnded_ = _this.handleSourceEnded_.bind(assertThisInitialized(_this));
27581
27582 _this.mediaSource.addEventListener('durationchange', _this.handleDurationChange_); // load the media source into the player
27583
27584
27585 _this.mediaSource.addEventListener('sourceopen', _this.handleSourceOpen_);
27586
27587 _this.mediaSource.addEventListener('sourceended', _this.handleSourceEnded_); // we don't have to handle sourceclose since dispose will handle termination of
27588 // everything, and the MediaSource should not be detached without a proper disposal
27589
27590
27591 _this.seekable_ = videojs__default['default'].createTimeRanges();
27592 _this.hasPlayed_ = false;
27593 _this.syncController_ = new SyncController(options);
27594 _this.segmentMetadataTrack_ = tech.addRemoteTextTrack({
27595 kind: 'metadata',
27596 label: 'segment-metadata'
27597 }, false).track;
27598 _this.decrypter_ = new Decrypter();
27599 _this.sourceUpdater_ = new SourceUpdater(_this.mediaSource);
27600 _this.inbandTextTracks_ = {};
27601 _this.timelineChangeController_ = new TimelineChangeController();
27602 var segmentLoaderSettings = {
27603 vhs: _this.vhs_,
27604 parse708captions: options.parse708captions,
27605 mediaSource: _this.mediaSource,
27606 currentTime: _this.tech_.currentTime.bind(_this.tech_),
27607 seekable: function seekable() {
27608 return _this.seekable();
27609 },
27610 seeking: function seeking() {
27611 return _this.tech_.seeking();
27612 },
27613 duration: function duration() {
27614 return _this.duration();
27615 },
27616 hasPlayed: function hasPlayed() {
27617 return _this.hasPlayed_;
27618 },
27619 goalBufferLength: function goalBufferLength() {
27620 return _this.goalBufferLength();
27621 },
27622 bandwidth: bandwidth,
27623 syncController: _this.syncController_,
27624 decrypter: _this.decrypter_,
27625 sourceType: _this.sourceType_,
27626 inbandTextTracks: _this.inbandTextTracks_,
27627 cacheEncryptionKeys: cacheEncryptionKeys,
27628 handlePartialData: handlePartialData,
27629 sourceUpdater: _this.sourceUpdater_,
27630 timelineChangeController: _this.timelineChangeController_
27631 }; // The source type check not only determines whether a special DASH playlist loader
27632 // should be used, but also covers the case where the provided src is a vhs-json
27633 // manifest object (instead of a URL). In the case of vhs-json, the default
27634 // PlaylistLoader should be used.
27635
27636 _this.masterPlaylistLoader_ = _this.sourceType_ === 'dash' ? new DashPlaylistLoader(src, _this.vhs_, _this.requestOptions_) : new PlaylistLoader(src, _this.vhs_, _this.requestOptions_);
27637
27638 _this.setupMasterPlaylistLoaderListeners_(); // setup segment loaders
27639 // combined audio/video or just video when alternate audio track is selected
27640
27641
27642 _this.mainSegmentLoader_ = new SegmentLoader(videojs__default['default'].mergeOptions(segmentLoaderSettings, {
27643 segmentMetadataTrack: _this.segmentMetadataTrack_,
27644 loaderType: 'main'
27645 }), options); // alternate audio track
27646
27647 _this.audioSegmentLoader_ = new SegmentLoader(videojs__default['default'].mergeOptions(segmentLoaderSettings, {
27648 loaderType: 'audio'
27649 }), options);
27650 _this.subtitleSegmentLoader_ = new VTTSegmentLoader(videojs__default['default'].mergeOptions(segmentLoaderSettings, {
27651 loaderType: 'vtt',
27652 featuresNativeTextTracks: _this.tech_.featuresNativeTextTracks
27653 }), options);
27654
27655 _this.setupSegmentLoaderListeners_();
27656
27657 if (_this.experimentalBufferBasedABR) {
27658 _this.masterPlaylistLoader_.one('loadedplaylist', function () {
27659 return _this.startABRTimer_();
27660 });
27661
27662 _this.tech_.on('pause', function () {
27663 return _this.stopABRTimer_();
27664 });
27665
27666 _this.tech_.on('play', function () {
27667 return _this.startABRTimer_();
27668 });
27669 } // Create SegmentLoader stat-getters
27670 // mediaRequests_
27671 // mediaRequestsAborted_
27672 // mediaRequestsTimedout_
27673 // mediaRequestsErrored_
27674 // mediaTransferDuration_
27675 // mediaBytesTransferred_
27676
27677
27678 loaderStats.forEach(function (stat) {
27679 _this[stat + '_'] = sumLoaderStat.bind(assertThisInitialized(_this), stat);
27680 });
27681 _this.logger_ = logger('MPC');
27682 _this.triggeredFmp4Usage = false;
27683
27684 if (_this.tech_.preload() === 'none') {
27685 _this.loadOnPlay_ = function () {
27686 _this.loadOnPlay_ = null;
27687
27688 _this.masterPlaylistLoader_.load();
27689 };
27690
27691 _this.tech_.one('play', _this.loadOnPlay_);
27692 } else {
27693 _this.masterPlaylistLoader_.load();
27694 }
27695
27696 return _this;
27697 }
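/* Illustrative sketch of construction (option names taken from the
 * destructuring above; values are placeholders, and `externVhs` is normally
 * supplied by the handler that owns this controller):
 *
 *   var mpc = new MasterPlaylistController({
 *     src: 'https://example.com/master.m3u8',
 *     tech: tech,
 *     externVhs: Vhs,
 *     sourceType: 'hls',
 *     bandwidth: 4194304,
 *     withCredentials: false
 *   });
 */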
27698 /**
27699 * Run selectPlaylist and switch to the new playlist if we should
27700 *
27701 * @private
27702 *
27703 */
27704
27705
27706 var _proto = MasterPlaylistController.prototype;
27707
27708 _proto.checkABR_ = function checkABR_() {
27709 var nextPlaylist = this.selectPlaylist();
27710
27711 if (this.shouldSwitchToMedia_(nextPlaylist)) {
27712 this.switchMedia_(nextPlaylist, 'abr');
27713 }
27714 };
27715
27716 _proto.switchMedia_ = function switchMedia_(playlist, cause, delay) {
27717 var oldMedia = this.media();
27718 var oldId = oldMedia && (oldMedia.id || oldMedia.uri);
27719 var newId = playlist.id || playlist.uri;
27720
27721 if (oldId && oldId !== newId) {
27722 this.logger_("switch media " + oldId + " -> " + newId + " from " + cause);
27723 this.tech_.trigger({
27724 type: 'usage',
27725 name: "vhs-rendition-change-" + cause
27726 });
27727 }
27728
27729 this.masterPlaylistLoader_.media(playlist, delay);
27730 }
27731 /**
27732 * Start a timer that periodically calls checkABR_
27733 *
27734 * @private
27735 */
27736 ;
27737
27738 _proto.startABRTimer_ = function startABRTimer_() {
27739 var _this2 = this;
27740
27741 this.stopABRTimer_();
27742 this.abrTimer_ = window__default['default'].setInterval(function () {
27743 return _this2.checkABR_();
27744 }, 250);
27745 }
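/* Illustrative note: with experimentalBufferBasedABR enabled, the constructor
 * above starts this 250ms checkABR_ interval on the first 'loadedplaylist' and
 * on 'play', and stops it on 'pause'; without the flag, rendition switching is
 * driven by the segment loader's 'bandwidthupdate' events instead (see
 * setupSegmentLoaderListeners_ below).
 */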
27746 /**
27747 * Stop the timer that periodically calls checkABR_
27748 *
27749 * @private
27750 */
27751 ;
27752
27753 _proto.stopABRTimer_ = function stopABRTimer_() {
27754 // if we're scrubbing, we don't need to pause.
27755 // This getter will be added to Video.js in version 7.11.
27756 if (this.tech_.scrubbing && this.tech_.scrubbing()) {
27757 return;
27758 }
27759
27760 window__default['default'].clearInterval(this.abrTimer_);
27761 this.abrTimer_ = null;
27762 }
27763 /**
27764 * Get a list of playlists for the currently selected audio playlist
27765 *
27766 * @return {Array} the array of audio playlists
27767 */
27768 ;
27769
27770 _proto.getAudioTrackPlaylists_ = function getAudioTrackPlaylists_() {
27771 var master = this.master(); // if we don't have any audio groups then we can only
27772 // assume that the audio tracks are contained in the master's
27773 // playlists array, use that or an empty array.
27774
27775 if (!master || !master.mediaGroups || !master.mediaGroups.AUDIO) {
27776 return master && master.playlists || [];
27777 }
27778
27779 var AUDIO = master.mediaGroups.AUDIO;
27780 var groupKeys = Object.keys(AUDIO);
27781 var track; // get the current active track
27782
27783 if (Object.keys(this.mediaTypes_.AUDIO.groups).length) {
27784 track = this.mediaTypes_.AUDIO.activeTrack(); // or get the default track from master if mediaTypes_ isn't set up yet
27785 } else {
27786 // default group is `main` or just the first group.
27787 var defaultGroup = AUDIO.main || groupKeys.length && AUDIO[groupKeys[0]];
27788
27789 for (var label in defaultGroup) {
27790 if (defaultGroup[label].default) {
27791 track = {
27792 label: label
27793 };
27794 break;
27795 }
27796 }
27797 } // no active track, no playlists.
27798
27799
27800 if (!track) {
27801 return [];
27802 }
27803
27804 var playlists = []; // get all of the playlists that are possible for the
27805 // active track.
27806
27807 for (var group in AUDIO) {
27808 if (AUDIO[group][track.label]) {
27809 var properties = AUDIO[group][track.label];
27810
27811 if (properties.playlists) {
27812 playlists.push.apply(playlists, properties.playlists);
27813 } else {
27814 playlists.push(properties);
27815 }
27816 }
27817 }
27818
27819 return playlists;
27820 }
27821 /**
27822 * Register event handlers on the master playlist loader. A helper
27823 * function for construction time.
27824 *
27825 * @private
27826 */
27827 ;
27828
27829 _proto.setupMasterPlaylistLoaderListeners_ = function setupMasterPlaylistLoaderListeners_() {
27830 var _this3 = this;
27831
27832 this.masterPlaylistLoader_.on('loadedmetadata', function () {
27833 var media = _this3.masterPlaylistLoader_.media();
27834
27835 var requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to
27836 // time out the request.
27837
27838 if (isLowestEnabledRendition(_this3.masterPlaylistLoader_.master, _this3.masterPlaylistLoader_.media())) {
27839 _this3.requestOptions_.timeout = 0;
27840 } else {
27841 _this3.requestOptions_.timeout = requestTimeout;
27842 } // if this isn't a live video and preload permits, start
27843 // downloading segments
27844
27845
27846 if (media.endList && _this3.tech_.preload() !== 'none') {
27847 _this3.mainSegmentLoader_.playlist(media, _this3.requestOptions_);
27848
27849 _this3.mainSegmentLoader_.load();
27850 }
27851
27852 setupMediaGroups({
27853 sourceType: _this3.sourceType_,
27854 segmentLoaders: {
27855 AUDIO: _this3.audioSegmentLoader_,
27856 SUBTITLES: _this3.subtitleSegmentLoader_,
27857 main: _this3.mainSegmentLoader_
27858 },
27859 tech: _this3.tech_,
27860 requestOptions: _this3.requestOptions_,
27861 masterPlaylistLoader: _this3.masterPlaylistLoader_,
27862 vhs: _this3.vhs_,
27863 master: _this3.master(),
27864 mediaTypes: _this3.mediaTypes_,
27865 blacklistCurrentPlaylist: _this3.blacklistCurrentPlaylist.bind(_this3)
27866 });
27867
27868 _this3.triggerPresenceUsage_(_this3.master(), media);
27869
27870 _this3.setupFirstPlay();
27871
27872 if (!_this3.mediaTypes_.AUDIO.activePlaylistLoader || _this3.mediaTypes_.AUDIO.activePlaylistLoader.media()) {
27873 _this3.trigger('selectedinitialmedia');
27874 } else {
27875 // We must wait for the active audio playlist loader to
27876 // finish setting up before triggering this event so the
27877 // representations API and EME setup is correct
27878 _this3.mediaTypes_.AUDIO.activePlaylistLoader.one('loadedmetadata', function () {
27879 _this3.trigger('selectedinitialmedia');
27880 });
27881 }
27882 });
27883 this.masterPlaylistLoader_.on('loadedplaylist', function () {
27884 if (_this3.loadOnPlay_) {
27885 _this3.tech_.off('play', _this3.loadOnPlay_);
27886 }
27887
27888 var updatedPlaylist = _this3.masterPlaylistLoader_.media();
27889
27890 if (!updatedPlaylist) {
27891 // exclude any variants that are not supported by the browser before selecting
27892 // an initial media as the playlist selectors do not consider browser support
27893 _this3.excludeUnsupportedVariants_();
27894
27895 var selectedMedia;
27896
27897 if (_this3.enableLowInitialPlaylist) {
27898 selectedMedia = _this3.selectInitialPlaylist();
27899 }
27900
27901 if (!selectedMedia) {
27902 selectedMedia = _this3.selectPlaylist();
27903 }
27904
27905 if (!selectedMedia || !_this3.shouldSwitchToMedia_(selectedMedia)) {
27906 return;
27907 }
27908
27909 _this3.initialMedia_ = selectedMedia;
27910
27911 _this3.switchMedia_(_this3.initialMedia_, 'initial'); // Under the standard case where a source URL is provided, loadedplaylist will
27912 // fire again since the playlist will be requested. In the case of vhs-json
27913 // (where the manifest object is provided as the source), when the media
27914 // playlist's `segments` list is already available, a media playlist won't be
27915 // requested, and loadedplaylist won't fire again, so the playlist handler must be
27916 // called on its own here.
27917
27918
27919 var haveJsonSource = _this3.sourceType_ === 'vhs-json' && _this3.initialMedia_.segments;
27920
27921 if (!haveJsonSource) {
27922 return;
27923 }
27924
27925 updatedPlaylist = _this3.initialMedia_;
27926 }
27927
27928 _this3.handleUpdatedMediaPlaylist(updatedPlaylist);
27929 });
27930 this.masterPlaylistLoader_.on('error', function () {
27931 _this3.blacklistCurrentPlaylist(_this3.masterPlaylistLoader_.error);
27932 });
27933 this.masterPlaylistLoader_.on('mediachanging', function () {
27934 _this3.mainSegmentLoader_.abort();
27935
27936 _this3.mainSegmentLoader_.pause();
27937 });
27938 this.masterPlaylistLoader_.on('mediachange', function () {
27939 var media = _this3.masterPlaylistLoader_.media();
27940
27941 var requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to
27942 // time out the request.
27943
27944 if (isLowestEnabledRendition(_this3.masterPlaylistLoader_.master, _this3.masterPlaylistLoader_.media())) {
27945 _this3.requestOptions_.timeout = 0;
27946 } else {
27947 _this3.requestOptions_.timeout = requestTimeout;
27948 } // TODO: Create a new event on the PlaylistLoader that signals
27949 // that the segments have changed in some way and use that to
27950 // update the SegmentLoader instead of doing it twice here and
27951 // on `loadedplaylist`
27952
27953
27954 _this3.mainSegmentLoader_.playlist(media, _this3.requestOptions_);
27955
27956 _this3.mainSegmentLoader_.load();
27957
27958 _this3.tech_.trigger({
27959 type: 'mediachange',
27960 bubbles: true
27961 });
27962 });
27963 this.masterPlaylistLoader_.on('playlistunchanged', function () {
27964 var updatedPlaylist = _this3.masterPlaylistLoader_.media(); // ignore unchanged playlists that have already been
27965 // excluded for not-changing. We likely just have a really slowly updating
27966 // playlist.
27967
27968
27969 if (updatedPlaylist.lastExcludeReason_ === 'playlist-unchanged') {
27970 return;
27971 }
27972
27973 var playlistOutdated = _this3.stuckAtPlaylistEnd_(updatedPlaylist);
27974
27975 if (playlistOutdated) {
27976 // Playlist has stopped updating and we're stuck at its end. Try to
27977 // blacklist it and switch to another playlist in the hope that that
27978 // one is updating (and give the player a chance to re-adjust to the
27979 // safe live point).
27980 _this3.blacklistCurrentPlaylist({
27981 message: 'Playlist no longer updating.',
27982 reason: 'playlist-unchanged'
27983 }); // useful for monitoring QoS
27984
27985
27986 _this3.tech_.trigger('playliststuck');
27987 }
27988 });
27989 this.masterPlaylistLoader_.on('renditiondisabled', function () {
27990 _this3.tech_.trigger({
27991 type: 'usage',
27992 name: 'vhs-rendition-disabled'
27993 });
27994
27995 _this3.tech_.trigger({
27996 type: 'usage',
27997 name: 'hls-rendition-disabled'
27998 });
27999 });
28000 this.masterPlaylistLoader_.on('renditionenabled', function () {
28001 _this3.tech_.trigger({
28002 type: 'usage',
28003 name: 'vhs-rendition-enabled'
28004 });
28005
28006 _this3.tech_.trigger({
28007 type: 'usage',
28008 name: 'hls-rendition-enabled'
28009 });
28010 });
28011 }
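/* Illustrative note: the request timeout used in the handlers above is derived
 * from the playlist's target duration,
 *
 *   var requestTimeout = media.targetDuration * 1.5 * 1000; // e.g. 6s targets -> 9000ms
 *
 * and is set to 0 (no timeout) when the current rendition is the lowest enabled
 * one, since timing out a request would leave nowhere to switch down to.
 */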
28012 /**
28013 * Given an updated media playlist (whether it was loaded for the first time, or
28014 * refreshed for live playlists), update any relevant properties and state to reflect
28015 * changes in the media that should be accounted for (e.g., cues and duration).
28016 *
28017 * @param {Object} updatedPlaylist the updated media playlist object
28018 *
28019 * @private
28020 */
28021 ;
28022
28023 _proto.handleUpdatedMediaPlaylist = function handleUpdatedMediaPlaylist(updatedPlaylist) {
28024 if (this.useCueTags_) {
28025 this.updateAdCues_(updatedPlaylist);
28026 } // TODO: Create a new event on the PlaylistLoader that signals
28027 // that the segments have changed in some way and use that to
28028 // update the SegmentLoader instead of doing it twice here and
28029 // on `mediachange`
28030
28031
28032 this.mainSegmentLoader_.playlist(updatedPlaylist, this.requestOptions_);
28033 this.updateDuration(!updatedPlaylist.endList); // If the player isn't paused, ensure that the segment loader is running,
28034 // as it is possible that it was temporarily stopped while waiting for
28035 // a playlist (e.g., in case the playlist errored and we re-requested it).
28036
28037 if (!this.tech_.paused()) {
28038 this.mainSegmentLoader_.load();
28039
28040 if (this.audioSegmentLoader_) {
28041 this.audioSegmentLoader_.load();
28042 }
28043 }
28044 }
28045 /**
28046 * A helper function for triggering presence usage events once per source
28047 *
28048 * @private
28049 */
28050 ;
28051
28052 _proto.triggerPresenceUsage_ = function triggerPresenceUsage_(master, media) {
28053 var mediaGroups = master.mediaGroups || {};
28054 var defaultDemuxed = true;
28055 var audioGroupKeys = Object.keys(mediaGroups.AUDIO);
28056
28057 for (var mediaGroup in mediaGroups.AUDIO) {
28058 for (var label in mediaGroups.AUDIO[mediaGroup]) {
28059 var properties = mediaGroups.AUDIO[mediaGroup][label];
28060
28061 if (!properties.uri) {
28062 defaultDemuxed = false;
28063 }
28064 }
28065 }
28066
28067 if (defaultDemuxed) {
28068 this.tech_.trigger({
28069 type: 'usage',
28070 name: 'vhs-demuxed'
28071 });
28072 this.tech_.trigger({
28073 type: 'usage',
28074 name: 'hls-demuxed'
28075 });
28076 }
28077
28078 if (Object.keys(mediaGroups.SUBTITLES).length) {
28079 this.tech_.trigger({
28080 type: 'usage',
28081 name: 'vhs-webvtt'
28082 });
28083 this.tech_.trigger({
28084 type: 'usage',
28085 name: 'hls-webvtt'
28086 });
28087 }
28088
28089 if (Vhs$1.Playlist.isAes(media)) {
28090 this.tech_.trigger({
28091 type: 'usage',
28092 name: 'vhs-aes'
28093 });
28094 this.tech_.trigger({
28095 type: 'usage',
28096 name: 'hls-aes'
28097 });
28098 }
28099
28100 if (audioGroupKeys.length && Object.keys(mediaGroups.AUDIO[audioGroupKeys[0]]).length > 1) {
28101 this.tech_.trigger({
28102 type: 'usage',
28103 name: 'vhs-alternate-audio'
28104 });
28105 this.tech_.trigger({
28106 type: 'usage',
28107 name: 'hls-alternate-audio'
28108 });
28109 }
28110
28111 if (this.useCueTags_) {
28112 this.tech_.trigger({
28113 type: 'usage',
28114 name: 'vhs-playlist-cue-tags'
28115 });
28116 this.tech_.trigger({
28117 type: 'usage',
28118 name: 'hls-playlist-cue-tags'
28119 });
28120 }
28121 };
28122
28123 _proto.shouldSwitchToMedia_ = function shouldSwitchToMedia_(nextPlaylist) {
28124 var currentPlaylist = this.masterPlaylistLoader_.media();
28125 var buffered = this.tech_.buffered();
28126 var forwardBuffer = buffered.length ? buffered.end(buffered.length - 1) - this.tech_.currentTime() : 0;
28127 var bufferLowWaterLine = this.bufferLowWaterLine();
28128 var bufferHighWaterLine = this.bufferHighWaterLine();
28129 return shouldSwitchToMedia({
28130 currentPlaylist: currentPlaylist,
28131 nextPlaylist: nextPlaylist,
28132 forwardBuffer: forwardBuffer,
28133 bufferLowWaterLine: bufferLowWaterLine,
28134 bufferHighWaterLine: bufferHighWaterLine,
28135 duration: this.duration(),
28136 experimentalBufferBasedABR: this.experimentalBufferBasedABR,
28137 log: this.logger_
28138 });
28139 }
28140 /**
28141 * Register event handlers on the segment loaders. A helper function
28142 * for construction time.
28143 *
28144 * @private
28145 */
28146 ;
28147
28148 _proto.setupSegmentLoaderListeners_ = function setupSegmentLoaderListeners_() {
28149 var _this4 = this;
28150
28151 if (!this.experimentalBufferBasedABR) {
28152 this.mainSegmentLoader_.on('bandwidthupdate', function () {
28153 var nextPlaylist = _this4.selectPlaylist();
28154
28155 if (_this4.shouldSwitchToMedia_(nextPlaylist)) {
28156 _this4.switchMedia_(nextPlaylist, 'bandwidthupdate');
28157 }
28158
28159 _this4.tech_.trigger('bandwidthupdate');
28160 });
28161 this.mainSegmentLoader_.on('progress', function () {
28162 _this4.trigger('progress');
28163 });
28164 }
28165
28166 this.mainSegmentLoader_.on('error', function () {
28167 _this4.blacklistCurrentPlaylist(_this4.mainSegmentLoader_.error());
28168 });
28169 this.mainSegmentLoader_.on('appenderror', function () {
28170 _this4.error = _this4.mainSegmentLoader_.error_;
28171
28172 _this4.trigger('error');
28173 });
28174 this.mainSegmentLoader_.on('syncinfoupdate', function () {
28175 _this4.onSyncInfoUpdate_();
28176 });
28177 this.mainSegmentLoader_.on('timestampoffset', function () {
28178 _this4.tech_.trigger({
28179 type: 'usage',
28180 name: 'vhs-timestamp-offset'
28181 });
28182
28183 _this4.tech_.trigger({
28184 type: 'usage',
28185 name: 'hls-timestamp-offset'
28186 });
28187 });
28188 this.audioSegmentLoader_.on('syncinfoupdate', function () {
28189 _this4.onSyncInfoUpdate_();
28190 });
28191 this.audioSegmentLoader_.on('appenderror', function () {
28192 _this4.error = _this4.audioSegmentLoader_.error_;
28193
28194 _this4.trigger('error');
28195 });
28196 this.mainSegmentLoader_.on('ended', function () {
28197 _this4.logger_('main segment loader ended');
28198
28199 _this4.onEndOfStream();
28200 });
28201 this.mainSegmentLoader_.on('earlyabort', function (event) {
28202 // never try to early abort with the new ABR algorithm
28203 if (_this4.experimentalBufferBasedABR) {
28204 return;
28205 }
28206
28207 _this4.delegateLoaders_('all', ['abort']);
28208
28209 _this4.blacklistCurrentPlaylist({
28210 message: 'Aborted early because there isn\'t enough bandwidth to complete the ' + 'request without rebuffering.'
28211 }, ABORT_EARLY_BLACKLIST_SECONDS);
28212 });
28213
28214 var updateCodecs = function updateCodecs() {
28215 if (!_this4.sourceUpdater_.hasCreatedSourceBuffers()) {
28216 return _this4.tryToCreateSourceBuffers_();
28217 }
28218
28219 var codecs = _this4.getCodecsOrExclude_(); // no codecs means that the playlist was excluded
28220
28221
28222 if (!codecs) {
28223 return;
28224 }
28225
28226 _this4.sourceUpdater_.addOrChangeSourceBuffers(codecs);
28227 };
28228
28229 this.mainSegmentLoader_.on('trackinfo', updateCodecs);
28230 this.audioSegmentLoader_.on('trackinfo', updateCodecs);
28231 this.mainSegmentLoader_.on('fmp4', function () {
28232 if (!_this4.triggeredFmp4Usage) {
28233 _this4.tech_.trigger({
28234 type: 'usage',
28235 name: 'vhs-fmp4'
28236 });
28237
28238 _this4.tech_.trigger({
28239 type: 'usage',
28240 name: 'hls-fmp4'
28241 });
28242
28243 _this4.triggeredFmp4Usage = true;
28244 }
28245 });
28246 this.audioSegmentLoader_.on('fmp4', function () {
28247 if (!_this4.triggeredFmp4Usage) {
28248 _this4.tech_.trigger({
28249 type: 'usage',
28250 name: 'vhs-fmp4'
28251 });
28252
28253 _this4.tech_.trigger({
28254 type: 'usage',
28255 name: 'hls-fmp4'
28256 });
28257
28258 _this4.triggeredFmp4Usage = true;
28259 }
28260 });
28261 this.audioSegmentLoader_.on('ended', function () {
28262 _this4.logger_('audioSegmentLoader ended');
28263
28264 _this4.onEndOfStream();
28265 });
28266 };
28267
28268 _proto.mediaSecondsLoaded_ = function mediaSecondsLoaded_() {
28269 return this.audioSegmentLoader_.mediaSecondsLoaded + this.mainSegmentLoader_.mediaSecondsLoaded; // seconds loaded are summed across the audio and main loaders
28270 }
28271 /**
28272 * Call load on our SegmentLoaders
28273 */
28274 ;
28275
28276 _proto.load = function load() {
28277 this.mainSegmentLoader_.load();
28278
28279 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
28280 this.audioSegmentLoader_.load();
28281 }
28282
28283 if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
28284 this.subtitleSegmentLoader_.load();
28285 }
28286 }
28287 /**
28288 * Re-tune playback quality level for the current player
28289 * conditions without performing destructive actions, like
28290 * removing already buffered content
28291 *
28292 * @private
28293 */
28294 ;
28295
28296 _proto.smoothQualityChange_ = function smoothQualityChange_(media) {
28297 if (media === void 0) {
28298 media = this.selectPlaylist();
28299 }
28300
28301 if (media === this.masterPlaylistLoader_.media()) {
28302 return;
28303 }
28304
28305 this.switchMedia_(media, 'smooth-quality');
28306 this.mainSegmentLoader_.resetLoader(); // don't need to reset audio as it is reset when media changes
28307 }
28308 /**
28309 * Re-tune playback quality level for the current player
28310 * conditions. This method will perform destructive actions like removing
28311 * already buffered content in order to readjust the currently active
28312 * playlist quickly. This is good for manual quality changes
28313 *
28314 * @private
28315 */
28316 ;
28317
28318 _proto.fastQualityChange_ = function fastQualityChange_(media) {
28319 var _this5 = this;
28320
28321 if (media === void 0) {
28322 media = this.selectPlaylist();
28323 }
28324
28325 if (media === this.masterPlaylistLoader_.media()) {
28326 this.logger_('skipping fastQualityChange because new media is same as old');
28327 return;
28328 }
28329
28330 this.switchMedia_(media, 'fast-quality'); // Delete all buffered data to allow an immediate quality switch, then seek to give
// the browser a kick to remove any cached frames from the previous rendition (.04 seconds
28332 // ahead is roughly the minimum that will accomplish this across a variety of content
28333 // in IE and Edge, but seeking in place is sufficient on all other browsers)
28334 // Edge/IE bug: https://developer.microsoft.com/en-us/microsoft-edge/platform/issues/14600375/
28335 // Chrome bug: https://bugs.chromium.org/p/chromium/issues/detail?id=651904
28336
28337 this.mainSegmentLoader_.resetEverything(function () {
28338 // Since this is not a typical seek, we avoid the seekTo method which can cause segments
28339 // from the previously enabled rendition to load before the new playlist has finished loading
28340 if (videojs__default['default'].browser.IE_VERSION || videojs__default['default'].browser.IS_EDGE) {
28341 _this5.tech_.setCurrentTime(_this5.tech_.currentTime() + 0.04);
28342 } else {
28343 _this5.tech_.setCurrentTime(_this5.tech_.currentTime());
28344 }
28345 }); // don't need to reset audio as it is reset when media changes
28346 }
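  // Usage sketch: whether a rendition change goes through the smooth or the
  // fast path above is controlled by the `smoothQualityChange` option (see
  // the Representation wiring further down in this file). A hypothetical
  // player setup that opts into the smooth path:
  //
  //   var player = videojs('my-player', {
  //     html5: { vhs: { smoothQualityChange: true } }
  //   });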
28347 /**
28348 * Begin playback.
28349 */
28350 ;
28351
28352 _proto.play = function play() {
28353 if (this.setupFirstPlay()) {
28354 return;
28355 }
28356
28357 if (this.tech_.ended()) {
28358 this.tech_.setCurrentTime(0);
28359 }
28360
28361 if (this.hasPlayed_) {
28362 this.load();
28363 }
28364
28365 var seekable = this.tech_.seekable(); // if the viewer has paused and we fell out of the live window,
28366 // seek forward to the live point
28367
28368 if (this.tech_.duration() === Infinity) {
28369 if (this.tech_.currentTime() < seekable.start(0)) {
28370 return this.tech_.setCurrentTime(seekable.end(seekable.length - 1));
28371 }
28372 }
28373 }
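  // Worked example of the live branch above, with assumed numbers: for a
  // live stream with seekable [100, 300] and a paused viewer at
  // currentTime 50, 50 < seekable.start(0), so play() seeks forward to
  // seekable.end(seekable.length - 1) === 300, the live point.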
28374 /**
28375 * Seek to the latest media position if this is a live video and the
28376 * player and video are loaded and initialized.
28377 */
28378 ;
28379
28380 _proto.setupFirstPlay = function setupFirstPlay() {
28381 var _this6 = this;
28382
28383 var media = this.masterPlaylistLoader_.media(); // Check that everything is ready to begin buffering for the first call to play
28384 // If 1) there is no active media
28385 // 2) the player is paused
// 3) the first play has already been set up
28387 // then exit early
28388
28389 if (!media || this.tech_.paused() || this.hasPlayed_) {
28390 return false;
28391 } // when the video is a live stream
28392
28393
28394 if (!media.endList) {
28395 var seekable = this.seekable();
28396
28397 if (!seekable.length) {
28398 // without a seekable range, the player cannot seek to begin buffering at the live
28399 // point
28400 return false;
28401 }
28402
28403 if (videojs__default['default'].browser.IE_VERSION && this.tech_.readyState() === 0) {
28404 // IE11 throws an InvalidStateError if you try to set currentTime while the
28405 // readyState is 0, so it must be delayed until the tech fires loadedmetadata.
28406 this.tech_.one('loadedmetadata', function () {
28407 _this6.trigger('firstplay');
28408
28409 _this6.tech_.setCurrentTime(seekable.end(0));
28410
28411 _this6.hasPlayed_ = true;
28412 });
28413 return false;
28414 } // trigger firstplay to inform the source handler to ignore the next seek event
28415
28416
28417 this.trigger('firstplay'); // seek to the live point
28418
28419 this.tech_.setCurrentTime(seekable.end(0));
28420 }
28421
28422 this.hasPlayed_ = true; // we can begin loading now that everything is ready
28423
28424 this.load();
28425 return true;
28426 }
28427 /**
28428 * handle the sourceopen event on the MediaSource
28429 *
28430 * @private
28431 */
28432 ;
28433
28434 _proto.handleSourceOpen_ = function handleSourceOpen_() {
28435 // Only attempt to create the source buffer if none already exist.
28436 // handleSourceOpen is also called when we are "re-opening" a source buffer
28437 // after `endOfStream` has been called (in response to a seek for instance)
28438 this.tryToCreateSourceBuffers_(); // if autoplay is enabled, begin playback. This is duplicative of
28439 // code in video.js but is required because play() must be invoked
28440 // *after* the media source has opened.
28441
28442 if (this.tech_.autoplay()) {
28443 var playPromise = this.tech_.play(); // Catch/silence error when a pause interrupts a play request
28444 // on browsers which return a promise
28445
28446 if (typeof playPromise !== 'undefined' && typeof playPromise.then === 'function') {
28447 playPromise.then(null, function (e) {});
28448 }
28449 }
28450
28451 this.trigger('sourceopen');
28452 }
28453 /**
28454 * handle the sourceended event on the MediaSource
28455 *
28456 * @private
28457 */
28458 ;
28459
28460 _proto.handleSourceEnded_ = function handleSourceEnded_() {
28461 if (!this.inbandTextTracks_.metadataTrack_) {
28462 return;
28463 }
28464
28465 var cues = this.inbandTextTracks_.metadataTrack_.cues;
28466
28467 if (!cues || !cues.length) {
28468 return;
28469 }
28470
28471 var duration = this.duration();
28472 cues[cues.length - 1].endTime = isNaN(duration) || Math.abs(duration) === Infinity ? Number.MAX_VALUE : duration;
28473 }
28474 /**
28475 * handle the durationchange event on the MediaSource
28476 *
28477 * @private
28478 */
28479 ;
28480
28481 _proto.handleDurationChange_ = function handleDurationChange_() {
28482 this.tech_.trigger('durationchange');
28483 }
28484 /**
28485 * Calls endOfStream on the media source when all active stream types have called
28486 * endOfStream
28487 *
28488 * @param {string} streamType
28489 * Stream type of the segment loader that called endOfStream
28490 * @private
28491 */
28492 ;
28493
28494 _proto.onEndOfStream = function onEndOfStream() {
28495 var isEndOfStream = this.mainSegmentLoader_.ended_;
28496
28497 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
28498 // if the audio playlist loader exists, then alternate audio is active
28499 if (!this.mainSegmentLoader_.currentMediaInfo_ || this.mainSegmentLoader_.currentMediaInfo_.hasVideo) {
28500 // if we do not know if the main segment loader contains video yet or if we
28501 // definitively know the main segment loader contains video, then we need to wait
28502 // for both main and audio segment loaders to call endOfStream
28503 isEndOfStream = isEndOfStream && this.audioSegmentLoader_.ended_;
28504 } else {
28505 // otherwise just rely on the audio loader
28506 isEndOfStream = this.audioSegmentLoader_.ended_;
28507 }
28508 }
28509
28510 if (!isEndOfStream) {
28511 return;
28512 }
28513
28514 this.stopABRTimer_();
28515 this.sourceUpdater_.endOfStream();
28516 }
28517 /**
28518 * Check if a playlist has stopped being updated
28519 *
28520 * @param {Object} playlist the media playlist object
28521 * @return {boolean} whether the playlist has stopped being updated or not
28522 */
28523 ;
28524
28525 _proto.stuckAtPlaylistEnd_ = function stuckAtPlaylistEnd_(playlist) {
28526 var seekable = this.seekable();
28527
28528 if (!seekable.length) {
28529 // playlist doesn't have enough information to determine whether we are stuck
28530 return false;
28531 }
28532
28533 var expired = this.syncController_.getExpiredTime(playlist, this.duration());
28534
28535 if (expired === null) {
28536 return false;
28537 } // does not use the safe live end to calculate playlist end, since we
28538 // don't want to say we are stuck while there is still content
28539
28540
28541 var absolutePlaylistEnd = Vhs$1.Playlist.playlistEnd(playlist, expired);
28542 var currentTime = this.tech_.currentTime();
28543 var buffered = this.tech_.buffered();
28544
28545 if (!buffered.length) {
28546 // return true if the playhead reached the absolute end of the playlist
28547 return absolutePlaylistEnd - currentTime <= SAFE_TIME_DELTA;
28548 }
28549
28550 var bufferedEnd = buffered.end(buffered.length - 1); // return true if there is too little buffer left and buffer has reached absolute
28551 // end of playlist
28552
28553 return bufferedEnd - currentTime <= SAFE_TIME_DELTA && absolutePlaylistEnd - bufferedEnd <= SAFE_TIME_DELTA;
28554 }
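  // Worked example, assuming SAFE_TIME_DELTA is 0.1 seconds: with an
  // absolute playlist end of 60, buffered [[0, 59.95]] and currentTime 59.9,
  // bufferedEnd - currentTime === 0.05 and absolutePlaylistEnd - bufferedEnd
  // === 0.05, both within the delta, so playback is considered stuck.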
28555 /**
28556 * Blacklists a playlist when an error occurs for a set amount of time
28557 * making it unavailable for selection by the rendition selection algorithm
28558 * and then forces a new playlist (rendition) selection.
28559 *
28560 * @param {Object=} error an optional error that may include the playlist
28561 * to blacklist
28562 * @param {number=} blacklistDuration an optional number of seconds to blacklist the
28563 * playlist
28564 */
28565 ;
28566
28567 _proto.blacklistCurrentPlaylist = function blacklistCurrentPlaylist(error, blacklistDuration) {
28568 if (error === void 0) {
28569 error = {};
28570 }
28571
28572 // If the `error` was generated by the playlist loader, it will contain
28573 // the playlist we were trying to load (but failed) and that should be
28574 // blacklisted instead of the currently selected playlist which is likely
28575 // out-of-date in this scenario
28576 var currentPlaylist = error.playlist || this.masterPlaylistLoader_.media();
28577 blacklistDuration = blacklistDuration || error.blacklistDuration || this.blacklistDuration; // If there is no current playlist, then an error occurred while we were
28578 // trying to load the master OR while we were disposing of the tech
28579
28580 if (!currentPlaylist) {
28581 this.error = error;
28582
28583 if (this.mediaSource.readyState !== 'open') {
28584 this.trigger('error');
28585 } else {
28586 this.sourceUpdater_.endOfStream('network');
28587 }
28588
28589 return;
28590 }
28591
28592 var playlists = this.masterPlaylistLoader_.master.playlists;
28593 var enabledPlaylists = playlists.filter(isEnabled);
28594 var isFinalRendition = enabledPlaylists.length === 1 && enabledPlaylists[0] === currentPlaylist; // Don't blacklist the only playlist unless it was blacklisted
28595 // forever
28596
28597 if (playlists.length === 1 && blacklistDuration !== Infinity) {
28598 videojs__default['default'].log.warn("Problem encountered with playlist " + currentPlaylist.id + ". " + 'Trying again since it is the only playlist.');
28599 this.tech_.trigger('retryplaylist'); // if this is a final rendition, we should delay
28600
28601 return this.masterPlaylistLoader_.load(isFinalRendition);
28602 }
28603
28604 if (isFinalRendition) {
28605 // Since we're on the final non-blacklisted playlist, and we're about to blacklist
28606 // it, instead of erring the player or retrying this playlist, clear out the current
28607 // blacklist. This allows other playlists to be attempted in case any have been
28608 // fixed.
28609 var reincluded = false;
28610 playlists.forEach(function (playlist) {
28611 // skip current playlist which is about to be blacklisted
28612 if (playlist === currentPlaylist) {
28613 return;
28614 }
28615
28616 var excludeUntil = playlist.excludeUntil; // a playlist cannot be reincluded if it wasn't excluded to begin with.
28617
28618 if (typeof excludeUntil !== 'undefined' && excludeUntil !== Infinity) {
28619 reincluded = true;
28620 delete playlist.excludeUntil;
28621 }
28622 });
28623
28624 if (reincluded) {
28625 videojs__default['default'].log.warn('Removing other playlists from the exclusion list because the last ' + 'rendition is about to be excluded.'); // Technically we are retrying a playlist, in that we are simply retrying a previous
28626 // playlist. This is needed for users relying on the retryplaylist event to catch a
28627 // case where the player might be stuck and looping through "dead" playlists.
28628
28629 this.tech_.trigger('retryplaylist');
28630 }
28631 } // Blacklist this playlist
28632
28633
28634 currentPlaylist.excludeUntil = Date.now() + blacklistDuration * 1000;
28635
28636 if (error.reason) {
28637 currentPlaylist.lastExcludeReason_ = error.reason;
28638 }
28639
28640 this.tech_.trigger('blacklistplaylist');
28641 this.tech_.trigger({
28642 type: 'usage',
28643 name: 'vhs-rendition-blacklisted'
28644 });
28645 this.tech_.trigger({
28646 type: 'usage',
28647 name: 'hls-rendition-blacklisted'
28648 }); // TODO: should we select a new playlist if this blacklist wasn't for the currentPlaylist?
// Would be something like media().id !== currentPlaylist.id and we would need something
28650 // like `pendingMedia` in playlist loaders to check against that too. This will prevent us
28651 // from loading a new playlist on any blacklist.
28652 // Select a new playlist
28653
28654 var nextPlaylist = this.selectPlaylist();
28655
28656 if (!nextPlaylist) {
28657 this.error = 'Playback cannot continue. No available working or supported playlists.';
28658 this.trigger('error');
28659 return;
28660 }
28661
28662 var logFn = error.internal ? this.logger_ : videojs__default['default'].log.warn;
28663 var errorMessage = error.message ? ' ' + error.message : '';
28664 logFn((error.internal ? 'Internal problem' : 'Problem') + " encountered with playlist " + currentPlaylist.id + "." + (errorMessage + " Switching to playlist " + nextPlaylist.id + ".")); // if audio group changed reset audio loaders
28665
28666 if (nextPlaylist.attributes.AUDIO !== currentPlaylist.attributes.AUDIO) {
28667 this.delegateLoaders_('audio', ['abort', 'pause']);
28668 } // if subtitle group changed reset subtitle loaders
28669
28670
28671 if (nextPlaylist.attributes.SUBTITLES !== currentPlaylist.attributes.SUBTITLES) {
28672 this.delegateLoaders_('subtitle', ['abort', 'pause']);
28673 }
28674
28675 this.delegateLoaders_('main', ['abort', 'pause']);
28676 var delayDuration = nextPlaylist.targetDuration / 2 * 1000 || 5 * 1000;
28677 var shouldDelay = typeof nextPlaylist.lastRequest === 'number' && Date.now() - nextPlaylist.lastRequest <= delayDuration; // delay if it's a final rendition or if the last refresh is sooner than half targetDuration
28678
28679 return this.switchMedia_(nextPlaylist, 'exclude', isFinalRendition || shouldDelay);
28680 }
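  // Sketch of the exclusion bookkeeping above (`mpc` standing in for a
  // MasterPlaylistController instance, values assumed): a playlist excluded
  // at Date.now() === 1000000 with blacklistDuration === 300 gets
  // excludeUntil === 1000000 + 300 * 1000, while a duration of Infinity
  // marks it permanently excluded:
  //
  //   mpc.blacklistCurrentPlaylist({ message: 'unsupported codecs' }, Infinity);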
28681 /**
28682 * Pause all segment/playlist loaders
28683 */
28684 ;
28685
28686 _proto.pauseLoading = function pauseLoading() {
28687 this.delegateLoaders_('all', ['abort', 'pause']);
28688 this.stopABRTimer_();
28689 }
28690 /**
28691 * Call a set of functions in order on playlist loaders, segment loaders,
28692 * or both types of loaders.
28693 *
28694 * @param {string} filter
28695 * Filter loaders that should call fnNames using a string. Can be:
28696 * * all - run on all loaders
28697 * * audio - run on all audio loaders
28698 * * subtitle - run on all subtitle loaders
28699 * * main - run on the main/master loaders
28700 *
28701 * @param {Array|string} fnNames
28702 * A string or array of function names to call.
28703 */
28704 ;
28705
28706 _proto.delegateLoaders_ = function delegateLoaders_(filter, fnNames) {
28707 var _this7 = this;
28708
28709 var loaders = [];
28710 var dontFilterPlaylist = filter === 'all';
28711
28712 if (dontFilterPlaylist || filter === 'main') {
28713 loaders.push(this.masterPlaylistLoader_);
28714 }
28715
28716 var mediaTypes = [];
28717
28718 if (dontFilterPlaylist || filter === 'audio') {
28719 mediaTypes.push('AUDIO');
28720 }
28721
28722 if (dontFilterPlaylist || filter === 'subtitle') {
28723 mediaTypes.push('CLOSED-CAPTIONS');
28724 mediaTypes.push('SUBTITLES');
28725 }
28726
28727 mediaTypes.forEach(function (mediaType) {
28728 var loader = _this7.mediaTypes_[mediaType] && _this7.mediaTypes_[mediaType].activePlaylistLoader;
28729
28730 if (loader) {
28731 loaders.push(loader);
28732 }
28733 });
28734 ['main', 'audio', 'subtitle'].forEach(function (name) {
28735 var loader = _this7[name + "SegmentLoader_"];
28736
28737 if (loader && (filter === name || filter === 'all')) {
28738 loaders.push(loader);
28739 }
28740 });
28741 loaders.forEach(function (loader) {
28742 return fnNames.forEach(function (fnName) {
28743 if (typeof loader[fnName] === 'function') {
28744 loader[fnName]();
28745 }
28746 });
28747 });
28748 }
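  // Examples of the filter/fnNames contract, taken from call sites in this
  // class:
  //
  //   this.delegateLoaders_('all', ['abort', 'pause']);   // pauseLoading
  //   this.delegateLoaders_('audio', ['abort', 'pause']); // audio group changed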
28749 /**
28750 * set the current time on all segment loaders
28751 *
28752 * @param {TimeRange} currentTime the current time to set
28753 * @return {TimeRange} the current time
28754 */
28755 ;
28756
28757 _proto.setCurrentTime = function setCurrentTime(currentTime) {
28758 var buffered = findRange(this.tech_.buffered(), currentTime);
28759
28760 if (!(this.masterPlaylistLoader_ && this.masterPlaylistLoader_.media())) {
28761 // return immediately if the metadata is not ready yet
28762 return 0;
} // it's clearly an edge case, but don't throw an error if asked to
28764 // seek within an empty playlist
28765
28766
28767 if (!this.masterPlaylistLoader_.media().segments) {
28768 return 0;
28769 } // if the seek location is already buffered, continue buffering as usual
28770
28771
28772 if (buffered && buffered.length) {
28773 return currentTime;
28774 } // cancel outstanding requests so we begin buffering at the new
28775 // location
28776
28777
28778 this.mainSegmentLoader_.resetEverything();
28779 this.mainSegmentLoader_.abort();
28780
28781 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
28782 this.audioSegmentLoader_.resetEverything();
28783 this.audioSegmentLoader_.abort();
28784 }
28785
28786 if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
28787 this.subtitleSegmentLoader_.resetEverything();
28788 this.subtitleSegmentLoader_.abort();
28789 } // start segment loader loading in case they are paused
28790
28791
28792 this.load();
28793 }
28794 /**
28795 * get the current duration
28796 *
28797 * @return {TimeRange} the duration
28798 */
28799 ;
28800
28801 _proto.duration = function duration() {
28802 if (!this.masterPlaylistLoader_) {
28803 return 0;
28804 }
28805
28806 var media = this.masterPlaylistLoader_.media();
28807
28808 if (!media) {
28809 // no playlists loaded yet, so can't determine a duration
28810 return 0;
28811 } // Don't rely on the media source for duration in the case of a live playlist since
28812 // setting the native MediaSource's duration to infinity ends up with consequences to
28813 // seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.
28814 //
28815 // This is resolved in the spec by https://github.com/w3c/media-source/pull/92,
28816 // however, few browsers have support for setLiveSeekableRange()
28817 // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
28818 //
28819 // Until a time when the duration of the media source can be set to infinity, and a
28820 // seekable range specified across browsers, just return Infinity.
28821
28822
28823 if (!media.endList) {
28824 return Infinity;
28825 } // Since this is a VOD video, it is safe to rely on the media source's duration (if
28826 // available). If it's not available, fall back to a playlist-calculated estimate.
28827
28828
28829 if (this.mediaSource) {
28830 return this.mediaSource.duration;
28831 }
28832
28833 return Vhs$1.Playlist.duration(media);
28834 }
28835 /**
28836 * check the seekable range
28837 *
28838 * @return {TimeRange} the seekable range
28839 */
28840 ;
28841
28842 _proto.seekable = function seekable() {
28843 return this.seekable_;
28844 };
28845
28846 _proto.onSyncInfoUpdate_ = function onSyncInfoUpdate_() {
28847 var audioSeekable;
28848
28849 if (!this.masterPlaylistLoader_) {
28850 return;
28851 }
28852
28853 var media = this.masterPlaylistLoader_.media();
28854
28855 if (!media) {
28856 return;
28857 }
28858
28859 var expired = this.syncController_.getExpiredTime(media, this.duration());
28860
28861 if (expired === null) {
28862 // not enough information to update seekable
28863 return;
28864 }
28865
28866 var master = this.masterPlaylistLoader_.master;
28867 var mainSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(master, media));
28868
28869 if (mainSeekable.length === 0) {
28870 return;
28871 }
28872
28873 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
28874 media = this.mediaTypes_.AUDIO.activePlaylistLoader.media();
28875 expired = this.syncController_.getExpiredTime(media, this.duration());
28876
28877 if (expired === null) {
28878 return;
28879 }
28880
28881 audioSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(master, media));
28882
28883 if (audioSeekable.length === 0) {
28884 return;
28885 }
28886 }
28887
28888 var oldEnd;
28889 var oldStart;
28890
28891 if (this.seekable_ && this.seekable_.length) {
28892 oldEnd = this.seekable_.end(0);
28893 oldStart = this.seekable_.start(0);
28894 }
28895
28896 if (!audioSeekable) {
// seekable has been calculated based on the buffered video data, so it
28898 // can be returned directly
28899 this.seekable_ = mainSeekable;
28900 } else if (audioSeekable.start(0) > mainSeekable.end(0) || mainSeekable.start(0) > audioSeekable.end(0)) {
28901 // seekables are pretty far off, rely on main
28902 this.seekable_ = mainSeekable;
28903 } else {
28904 this.seekable_ = videojs__default['default'].createTimeRanges([[audioSeekable.start(0) > mainSeekable.start(0) ? audioSeekable.start(0) : mainSeekable.start(0), audioSeekable.end(0) < mainSeekable.end(0) ? audioSeekable.end(0) : mainSeekable.end(0)]]);
28905 } // seekable is the same as last time
28906
28907
28908 if (this.seekable_ && this.seekable_.length) {
28909 if (this.seekable_.end(0) === oldEnd && this.seekable_.start(0) === oldStart) {
28910 return;
28911 }
28912 }
28913
28914 this.logger_("seekable updated [" + printableRange(this.seekable_) + "]");
28915 this.tech_.trigger('seekablechanged');
28916 }
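  // Worked example of the range intersection above, with assumed ranges:
  // mainSeekable [[10, 100]] and audioSeekable [[12, 98]] overlap, so
  // seekable_ becomes [[12, 98]] (later start, earlier end); an
  // audioSeekable of [[120, 180]] would not overlap at all, and
  // mainSeekable would be used on its own.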
28917 /**
28918 * Update the player duration
28919 */
28920 ;
28921
28922 _proto.updateDuration = function updateDuration(isLive) {
28923 if (this.updateDuration_) {
28924 this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
28925 this.updateDuration_ = null;
28926 }
28927
28928 if (this.mediaSource.readyState !== 'open') {
28929 this.updateDuration_ = this.updateDuration.bind(this, isLive);
28930 this.mediaSource.addEventListener('sourceopen', this.updateDuration_);
28931 return;
28932 }
28933
28934 if (isLive) {
28935 var seekable = this.seekable();
28936
28937 if (!seekable.length) {
28938 return;
28939 } // Even in the case of a live playlist, the native MediaSource's duration should not
28940 // be set to Infinity (even though this would be expected for a live playlist), since
28941 // setting the native MediaSource's duration to infinity ends up with consequences to
28942 // seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.
28943 //
28944 // This is resolved in the spec by https://github.com/w3c/media-source/pull/92,
28945 // however, few browsers have support for setLiveSeekableRange()
28946 // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
28947 //
28948 // Until a time when the duration of the media source can be set to infinity, and a
28949 // seekable range specified across browsers, the duration should be greater than or
28950 // equal to the last possible seekable value.
28951 // MediaSource duration starts as NaN
28952 // It is possible (and probable) that this case will never be reached for many
28953 // sources, since the MediaSource reports duration as the highest value without
28954 // accounting for timestamp offset. For example, if the timestamp offset is -100 and
28955 // we buffered times 0 to 100 with real times of 100 to 200, even though current
28956 // time will be between 0 and 100, the native media source may report the duration
28957 // as 200. However, since we report duration separate from the media source (as
28958 // Infinity), and as long as the native media source duration value is greater than
28959 // our reported seekable range, seeks will work as expected. The large number as
28960 // duration for live is actually a strategy used by some players to work around the
28961 // issue of live seekable ranges cited above.
28962
28963
28964 if (isNaN(this.mediaSource.duration) || this.mediaSource.duration < seekable.end(seekable.length - 1)) {
28965 this.sourceUpdater_.setDuration(seekable.end(seekable.length - 1));
28966 }
28967
28968 return;
28969 }
28970
28971 var buffered = this.tech_.buffered();
28972 var duration = Vhs$1.Playlist.duration(this.masterPlaylistLoader_.media());
28973
28974 if (buffered.length > 0) {
28975 duration = Math.max(duration, buffered.end(buffered.length - 1));
28976 }
28977
28978 if (this.mediaSource.duration !== duration) {
28979 this.sourceUpdater_.setDuration(duration);
28980 }
28981 }
28982 /**
28983 * dispose of the MasterPlaylistController and everything
28984 * that it controls
28985 */
28986 ;
28987
28988 _proto.dispose = function dispose() {
28989 var _this8 = this;
28990
28991 this.trigger('dispose');
28992 this.decrypter_.terminate();
28993 this.masterPlaylistLoader_.dispose();
28994 this.mainSegmentLoader_.dispose();
28995
28996 if (this.loadOnPlay_) {
28997 this.tech_.off('play', this.loadOnPlay_);
28998 }
28999
29000 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
29001 var groups = _this8.mediaTypes_[type].groups;
29002
29003 for (var id in groups) {
29004 groups[id].forEach(function (group) {
29005 if (group.playlistLoader) {
29006 group.playlistLoader.dispose();
29007 }
29008 });
29009 }
29010 });
29011 this.audioSegmentLoader_.dispose();
29012 this.subtitleSegmentLoader_.dispose();
29013 this.sourceUpdater_.dispose();
29014 this.timelineChangeController_.dispose();
29015 this.stopABRTimer_();
29016
29017 if (this.updateDuration_) {
29018 this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
29019 }
29020
this.mediaSource.removeEventListener('durationchange', this.handleDurationChange_); // detach the remaining MediaSource event handlers
29022
29023 this.mediaSource.removeEventListener('sourceopen', this.handleSourceOpen_);
29024 this.mediaSource.removeEventListener('sourceended', this.handleSourceEnded_);
29025 this.off();
29026 }
29027 /**
29028 * return the master playlist object if we have one
29029 *
29030 * @return {Object} the master playlist object that we parsed
29031 */
29032 ;
29033
29034 _proto.master = function master() {
29035 return this.masterPlaylistLoader_.master;
29036 }
29037 /**
29038 * return the currently selected playlist
29039 *
29040 * @return {Object} the currently selected playlist object that we parsed
29041 */
29042 ;
29043
29044 _proto.media = function media() {
29045 // playlist loader will not return media if it has not been fully loaded
29046 return this.masterPlaylistLoader_.media() || this.initialMedia_;
29047 };
29048
29049 _proto.areMediaTypesKnown_ = function areMediaTypesKnown_() {
var usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader; // one or both loaders have not loaded sufficiently to get codecs
29051
29052 if (!this.mainSegmentLoader_.currentMediaInfo_ || usingAudioLoader && !this.audioSegmentLoader_.currentMediaInfo_) {
29053 return false;
29054 }
29055
29056 return true;
29057 };
29058
29059 _proto.getCodecsOrExclude_ = function getCodecsOrExclude_() {
29060 var _this9 = this;
29061
29062 var media = {
29063 main: this.mainSegmentLoader_.currentMediaInfo_ || {},
29064 audio: this.audioSegmentLoader_.currentMediaInfo_ || {}
29065 }; // set "main" media equal to video
29066
29067 media.video = media.main;
29068 var playlistCodecs = codecsForPlaylist(this.master(), this.media());
29069 var codecs = {};
29070 var usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader;
29071
29072 if (media.main.hasVideo) {
29073 codecs.video = playlistCodecs.video || media.main.videoCodec || DEFAULT_VIDEO_CODEC;
29074 }
29075
29076 if (media.main.isMuxed) {
29077 codecs.video += "," + (playlistCodecs.audio || media.main.audioCodec || DEFAULT_AUDIO_CODEC);
29078 }
29079
29080 if (media.main.hasAudio && !media.main.isMuxed || media.audio.hasAudio || usingAudioLoader) {
29081 codecs.audio = playlistCodecs.audio || media.main.audioCodec || media.audio.audioCodec || DEFAULT_AUDIO_CODEC; // set audio isFmp4 so we use the correct "supports" function below
29082
29083 media.audio.isFmp4 = media.main.hasAudio && !media.main.isMuxed ? media.main.isFmp4 : media.audio.isFmp4;
29084 } // no codecs, no playback.
29085
29086
29087 if (!codecs.audio && !codecs.video) {
29088 this.blacklistCurrentPlaylist({
29089 playlist: this.media(),
29090 message: 'Could not determine codecs for playlist.',
29091 blacklistDuration: Infinity
29092 });
29093 return;
29094 } // fmp4 relies on browser support, while ts relies on muxer support
29095
29096
29097 var supportFunction = function supportFunction(isFmp4, codec) {
29098 return isFmp4 ? browserSupportsCodec(codec) : muxerSupportsCodec(codec);
29099 };
29100
29101 var unsupportedCodecs = {};
29102 var unsupportedAudio;
29103 ['video', 'audio'].forEach(function (type) {
29104 if (codecs.hasOwnProperty(type) && !supportFunction(media[type].isFmp4, codecs[type])) {
29105 var supporter = media[type].isFmp4 ? 'browser' : 'muxer';
29106 unsupportedCodecs[supporter] = unsupportedCodecs[supporter] || [];
29107 unsupportedCodecs[supporter].push(codecs[type]);
29108
29109 if (type === 'audio') {
29110 unsupportedAudio = supporter;
29111 }
29112 }
29113 });
29114
29115 if (usingAudioLoader && unsupportedAudio && this.media().attributes.AUDIO) {
29116 var audioGroup = this.media().attributes.AUDIO;
29117 this.master().playlists.forEach(function (variant) {
29118 var variantAudioGroup = variant.attributes && variant.attributes.AUDIO;
29119
29120 if (variantAudioGroup === audioGroup && variant !== _this9.media()) {
29121 variant.excludeUntil = Infinity;
29122 }
29123 });
29124 this.logger_("excluding audio group " + audioGroup + " as " + unsupportedAudio + " does not support codec(s): \"" + codecs.audio + "\"");
29125 } // if we have any unsupported codecs blacklist this playlist.
29126
29127
29128 if (Object.keys(unsupportedCodecs).length) {
29129 var message = Object.keys(unsupportedCodecs).reduce(function (acc, supporter) {
29130 if (acc) {
29131 acc += ', ';
29132 }
29133
29134 acc += supporter + " does not support codec(s): \"" + unsupportedCodecs[supporter].join(',') + "\"";
29135 return acc;
29136 }, '') + '.';
29137 this.blacklistCurrentPlaylist({
29138 playlist: this.media(),
29139 internal: true,
29140 message: message,
29141 blacklistDuration: Infinity
29142 });
29143 return;
29144 } // check if codec switching is happening
29145
29146
29147 if (this.sourceUpdater_.hasCreatedSourceBuffers() && !this.sourceUpdater_.canChangeType()) {
29148 var switchMessages = [];
29149 ['video', 'audio'].forEach(function (type) {
29150 var newCodec = (parseCodecs(_this9.sourceUpdater_.codecs[type] || '')[0] || {}).type;
29151 var oldCodec = (parseCodecs(codecs[type] || '')[0] || {}).type;
29152
29153 if (newCodec && oldCodec && newCodec.toLowerCase() !== oldCodec.toLowerCase()) {
29154 switchMessages.push("\"" + _this9.sourceUpdater_.codecs[type] + "\" -> \"" + codecs[type] + "\"");
29155 }
29156 });
29157
29158 if (switchMessages.length) {
29159 this.blacklistCurrentPlaylist({
29160 playlist: this.media(),
29161 message: "Codec switching not supported: " + switchMessages.join(', ') + ".",
29162 blacklistDuration: Infinity,
29163 internal: true
29164 });
29165 return;
29166 }
29167 } // TODO: when using the muxer shouldn't we just return
29168 // the codecs that the muxer outputs?
29169
29170
29171 return codecs;
29172 }
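  // Illustrative input/output for the codec resolution above (codec strings
  // assumed): a demuxed variant advertising CODECS="avc1.4d400d,mp4a.40.2"
  // resolves to { video: 'avc1.4d400d', audio: 'mp4a.40.2' }, while a muxed
  // variant folds the audio codec into the video string instead:
  // { video: 'avc1.4d400d,mp4a.40.2' }.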
29173 /**
* Create source buffers and exclude any incompatible renditions.
29175 *
29176 * @private
29177 */
29178 ;
29179
29180 _proto.tryToCreateSourceBuffers_ = function tryToCreateSourceBuffers_() {
29181 // media source is not ready yet or sourceBuffers are already
29182 // created.
29183 if (this.mediaSource.readyState !== 'open' || this.sourceUpdater_.hasCreatedSourceBuffers()) {
29184 return;
29185 }
29186
29187 if (!this.areMediaTypesKnown_()) {
29188 return;
29189 }
29190
29191 var codecs = this.getCodecsOrExclude_(); // no codecs means that the playlist was excluded
29192
29193 if (!codecs) {
29194 return;
29195 }
29196
29197 this.sourceUpdater_.createSourceBuffers(codecs);
29198 var codecString = [codecs.video, codecs.audio].filter(Boolean).join(',');
29199 this.excludeIncompatibleVariants_(codecString);
29200 }
29201 /**
29202 * Excludes playlists with codecs that are unsupported by the muxer and browser.
29203 */
29204 ;
29205
29206 _proto.excludeUnsupportedVariants_ = function excludeUnsupportedVariants_() {
29207 var _this10 = this;
29208
29209 var playlists = this.master().playlists;
29210 var ids = []; // TODO: why don't we have a property to loop through all
// playlists? Why did we ever mix indexes and keys?
29212
29213 Object.keys(playlists).forEach(function (key) {
29214 var variant = playlists[key]; // check if we already processed this playlist.
29215
29216 if (ids.indexOf(variant.id) !== -1) {
29217 return;
29218 }
29219
29220 ids.push(variant.id);
var codecs = codecsForPlaylist(_this10.master(), variant); // master is a method on the controller, so it must be invoked
29222 var unsupported = [];
29223
29224 if (codecs.audio && !muxerSupportsCodec(codecs.audio) && !browserSupportsCodec(codecs.audio)) {
29225 unsupported.push("audio codec " + codecs.audio);
29226 }
29227
29228 if (codecs.video && !muxerSupportsCodec(codecs.video) && !browserSupportsCodec(codecs.video)) {
29229 unsupported.push("video codec " + codecs.video);
29230 }
29231
29232 if (codecs.text && codecs.text === 'stpp.ttml.im1t') {
29233 unsupported.push("text codec " + codecs.text);
29234 }
29235
29236 if (unsupported.length) {
29237 variant.excludeUntil = Infinity;
29238
29239 _this10.logger_("excluding " + variant.id + " for unsupported: " + unsupported.join(', '));
29240 }
29241 });
29242 }
29243 /**
29244 * Blacklist playlists that are known to be codec or
29245 * stream-incompatible with the SourceBuffer configuration. For
29246 * instance, Media Source Extensions would cause the video element to
29247 * stall waiting for video data if you switched from a variant with
29248 * video and audio to an audio-only one.
29249 *
29250 * @param {Object} media a media playlist compatible with the current
29251 * set of SourceBuffers. Variants in the current master playlist that
29252 * do not appear to have compatible codec or stream configurations
29253 * will be excluded from the default playlist selection algorithm
29254 * indefinitely.
29255 * @private
29256 */
29257 ;
29258
29259 _proto.excludeIncompatibleVariants_ = function excludeIncompatibleVariants_(codecString) {
29260 var _this11 = this;
29261
29262 var ids = [];
29263 var playlists = this.master().playlists;
29264 var codecs = unwrapCodecList(parseCodecs(codecString));
29265 var codecCount_ = codecCount(codecs);
29266 var videoDetails = codecs.video && parseCodecs(codecs.video)[0] || null;
29267 var audioDetails = codecs.audio && parseCodecs(codecs.audio)[0] || null;
29268 Object.keys(playlists).forEach(function (key) {
29269 var variant = playlists[key]; // check if we already processed this playlist.
// or if it is already excluded forever.
29271
29272 if (ids.indexOf(variant.id) !== -1 || variant.excludeUntil === Infinity) {
29273 return;
29274 }
29275
29276 ids.push(variant.id);
29277 var blacklistReasons = []; // get codecs from the playlist for this variant
29278
29279 var variantCodecs = codecsForPlaylist(_this11.masterPlaylistLoader_.master, variant);
29280 var variantCodecCount = codecCount(variantCodecs); // if no codecs are listed, we cannot determine that this
29281 // variant is incompatible. Wait for mux.js to probe
29282
29283 if (!variantCodecs.audio && !variantCodecs.video) {
29284 return;
29285 } // TODO: we can support this by removing the
29286 // old media source and creating a new one, but it will take some work.
29287 // The number of streams cannot change
29288
29289
29290 if (variantCodecCount !== codecCount_) {
29291 blacklistReasons.push("codec count \"" + variantCodecCount + "\" !== \"" + codecCount_ + "\"");
29292 } // only exclude playlists by codec change, if codecs cannot switch
29293 // during playback.
29294
29295
29296 if (!_this11.sourceUpdater_.canChangeType()) {
29297 var variantVideoDetails = variantCodecs.video && parseCodecs(variantCodecs.video)[0] || null;
29298 var variantAudioDetails = variantCodecs.audio && parseCodecs(variantCodecs.audio)[0] || null; // the video codec cannot change
29299
29300 if (variantVideoDetails && videoDetails && variantVideoDetails.type.toLowerCase() !== videoDetails.type.toLowerCase()) {
29301 blacklistReasons.push("video codec \"" + variantVideoDetails.type + "\" !== \"" + videoDetails.type + "\"");
29302 } // the audio codec cannot change
29303
29304
29305 if (variantAudioDetails && audioDetails && variantAudioDetails.type.toLowerCase() !== audioDetails.type.toLowerCase()) {
29306 blacklistReasons.push("audio codec \"" + variantAudioDetails.type + "\" !== \"" + audioDetails.type + "\"");
29307 }
29308 }
29309
29310 if (blacklistReasons.length) {
29311 variant.excludeUntil = Infinity;
29312
29313 _this11.logger_("blacklisting " + variant.id + ": " + blacklistReasons.join(' && '));
29314 }
29315 });
29316 };
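  // Worked example of the codec-count check above (values assumed): if the
  // source buffers were created for 'avc1.4d400d,mp4a.40.2'
  // (codecCount_ === 2), an audio-only variant advertising only
  // CODECS="mp4a.40.2" (variantCodecCount === 1) is excluded forever,
  // since the number of streams cannot change on the current media source.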
29317
29318 _proto.updateAdCues_ = function updateAdCues_(media) {
29319 var offset = 0;
29320 var seekable = this.seekable();
29321
29322 if (seekable.length) {
29323 offset = seekable.start(0);
29324 }
29325
29326 updateAdCues(media, this.cueTagsTrack_, offset);
29327 }
29328 /**
29329 * Calculates the desired forward buffer length based on current time
29330 *
29331 * @return {number} Desired forward buffer length in seconds
29332 */
29333 ;
29334
29335 _proto.goalBufferLength = function goalBufferLength() {
29336 var currentTime = this.tech_.currentTime();
29337 var initial = Config.GOAL_BUFFER_LENGTH;
29338 var rate = Config.GOAL_BUFFER_LENGTH_RATE;
29339 var max = Math.max(initial, Config.MAX_GOAL_BUFFER_LENGTH);
29340 return Math.min(initial + currentTime * rate, max);
29341 }
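  // Worked example, assuming defaults of GOAL_BUFFER_LENGTH = 30,
  // GOAL_BUFFER_LENGTH_RATE = 1 and MAX_GOAL_BUFFER_LENGTH = 60 (see Config
  // for the shipped values): at currentTime 20 the goal is
  // Math.min(30 + 20 * 1, 60) === 50 seconds of forward buffer.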
29342 /**
29343 * Calculates the desired buffer low water line based on current time
29344 *
29345 * @return {number} Desired buffer low water line in seconds
29346 */
29347 ;
29348
29349 _proto.bufferLowWaterLine = function bufferLowWaterLine() {
29350 var currentTime = this.tech_.currentTime();
29351 var initial = Config.BUFFER_LOW_WATER_LINE;
29352 var rate = Config.BUFFER_LOW_WATER_LINE_RATE;
29353 var max = Math.max(initial, Config.MAX_BUFFER_LOW_WATER_LINE);
29354 var newMax = Math.max(initial, Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE);
29355 return Math.min(initial + currentTime * rate, this.experimentalBufferBasedABR ? newMax : max);
29356 };
29357
29358 _proto.bufferHighWaterLine = function bufferHighWaterLine() {
29359 return Config.BUFFER_HIGH_WATER_LINE;
29360 };
29361
29362 return MasterPlaylistController;
29363 }(videojs__default['default'].EventTarget);
29364
29365 /**
29366 * Returns a function that acts as the Enable/disable playlist function.
29367 *
29368 * @param {PlaylistLoader} loader - The master playlist loader
29369 * @param {string} playlistID - id of the playlist
29370 * @param {Function} changePlaylistFn - A function to be called after a
29371 * playlist's enabled-state has been changed. Will NOT be called if a
29372 * playlist's enabled-state is unchanged
29373 * @param {boolean=} enable - Value to set the playlist enabled-state to
29374 * or if undefined returns the current enabled-state for the playlist
29375 * @return {Function} Function for setting/getting enabled
29376 */
29377
29378 var enableFunction = function enableFunction(loader, playlistID, changePlaylistFn) {
29379 return function (enable) {
29380 var playlist = loader.master.playlists[playlistID];
29381 var incompatible = isIncompatible(playlist);
29382 var currentlyEnabled = isEnabled(playlist);
29383
29384 if (typeof enable === 'undefined') {
29385 return currentlyEnabled;
29386 }
29387
29388 if (enable) {
29389 delete playlist.disabled;
29390 } else {
29391 playlist.disabled = true;
29392 }
29393
29394 if (enable !== currentlyEnabled && !incompatible) {
29395 // Ensure the outside world knows about our changes
29396 changePlaylistFn();
29397
29398 if (enable) {
29399 loader.trigger('renditionenabled');
29400 } else {
29401 loader.trigger('renditiondisabled');
29402 }
29403 }
29404
29405 return enable;
29406 };
29407 };
29408 /**
29409 * The representation object encapsulates the publicly visible information
29410 * in a media playlist along with a setter/getter-type function (enabled)
29411 * for changing the enabled-state of a particular playlist entry
29412 *
29413 * @class Representation
29414 */
29415
29416
29417 var Representation = function Representation(vhsHandler, playlist, id) {
29418 var mpc = vhsHandler.masterPlaylistController_,
29419 smoothQualityChange = vhsHandler.options_.smoothQualityChange; // Get a reference to a bound version of the quality change function
29420
29421 var changeType = smoothQualityChange ? 'smooth' : 'fast';
29422 var qualityChangeFunction = mpc[changeType + "QualityChange_"].bind(mpc); // some playlist attributes are optional
29423
29424 if (playlist.attributes) {
29425 var resolution = playlist.attributes.RESOLUTION;
29426 this.width = resolution && resolution.width;
29427 this.height = resolution && resolution.height;
29428 this.bandwidth = playlist.attributes.BANDWIDTH;
29429 }
29430
29431 this.codecs = codecsForPlaylist(mpc.master(), playlist);
29432 this.playlist = playlist; // The id is simply the ordinality of the media playlist
29433 // within the master playlist
29434
29435 this.id = id; // Partially-apply the enableFunction to create a playlist-
29436 // specific variant
29437
29438 this.enabled = enableFunction(vhsHandler.playlists, playlist.id, qualityChangeFunction);
29439 };
29440 /**
29441 * A mixin function that adds the `representations` api to an instance
29442 * of the VhsHandler class
29443 *
29444 * @param {VhsHandler} vhsHandler - An instance of VhsHandler to add the
29445 * representation API into
29446 */
29447
29448
29449 var renditionSelectionMixin = function renditionSelectionMixin(vhsHandler) {
29450 // Add a single API-specific function to the VhsHandler instance
29451 vhsHandler.representations = function () {
29452 var master = vhsHandler.masterPlaylistController_.master();
29453 var playlists = isAudioOnly(master) ? vhsHandler.masterPlaylistController_.getAudioTrackPlaylists_() : master.playlists;
29454
29455 if (!playlists) {
29456 return [];
29457 }
29458
29459 return playlists.filter(function (media) {
29460 return !isIncompatible(media);
}).map(function (e) {
29462 return new Representation(vhsHandler, e, e.id);
29463 });
29464 };
29465 };
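  // Hypothetical usage sketch of the representations API added above,
  // assuming the handler is reachable via the tech as `player.tech().vhs`:
  //
  //   var representations = player.tech().vhs.representations();
  //   representations.forEach(function (rep) {
  //     // keep only renditions at or below 720p enabled
  //     rep.enabled(typeof rep.height === 'number' && rep.height <= 720);
  //   });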
29466
29467 /**
29468 * @file playback-watcher.js
29469 *
29470 * Playback starts, and now my watch begins. It shall not end until my death. I shall
29471 * take no wait, hold no uncleared timeouts, father no bad seeks. I shall wear no crowns
29472 * and win no glory. I shall live and die at my post. I am the corrector of the underflow.
29473 * I am the watcher of gaps. I am the shield that guards the realms of seekable. I pledge
29474 * my life and honor to the Playback Watch, for this Player and all the Players to come.
29475 */
29476
29477 var timerCancelEvents = ['seeking', 'seeked', 'pause', 'playing', 'error'];
29478 /**
29479 * Returns whether or not the current time should be considered close to buffered content,
29480 * taking into consideration whether there's enough buffered content for proper playback.
29481 *
29482 * @param {Object} options
29483 * Options object
29484 * @param {TimeRange} options.buffered
29485 * Current buffer
29486 * @param {number} options.targetDuration
29487 * The active playlist's target duration
29488 * @param {number} options.currentTime
29489 * The current time of the player
29490 * @return {boolean}
29491 * Whether the current time should be considered close to the buffer
29492 */
29493
29494 var closeToBufferedContent = function closeToBufferedContent(_ref) {
29495 var buffered = _ref.buffered,
29496 targetDuration = _ref.targetDuration,
29497 currentTime = _ref.currentTime;
29498
29499 if (!buffered.length) {
29500 return false;
} // At least two to three segments' worth of content should be buffered before there's a
29502 // full enough buffer to consider taking any actions.
29503
29504
29505 if (buffered.end(0) - buffered.start(0) < targetDuration * 2) {
29506 return false;
29507 } // It's possible that, on seek, a remove hasn't completed and the buffered range is
29508 // somewhere past the current time. In that event, don't consider the buffered content
29509 // close.
29510
29511
29512 if (currentTime > buffered.start(0)) {
29513 return false;
29514 } // Since target duration generally represents the max (or close to max) duration of a
29515 // segment, if the buffer is within a segment of the current time, the gap probably
29516 // won't be closed, and current time should be considered close to buffered content.
29517
29518
29519 return buffered.start(0) - currentTime < targetDuration;
29520 };
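  // Worked example with assumed values: for buffered [[12, 40]],
  // targetDuration 10 and currentTime 5, the buffer spans 28s (>= 2 * 10),
  // currentTime is not past buffered.start(0), and 12 - 5 < 10, so the
  // current time is considered close to buffered content.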
29521 /**
29522 * @class PlaybackWatcher
29523 */
29524
29525 var PlaybackWatcher = /*#__PURE__*/function () {
29526 /**
* Represents a PlaybackWatcher object.
29528 *
29529 * @class
29530 * @param {Object} options an object that includes the tech and settings
29531 */
29532 function PlaybackWatcher(options) {
29533 var _this = this;
29534
29535 this.masterPlaylistController_ = options.masterPlaylistController;
29536 this.tech_ = options.tech;
29537 this.seekable = options.seekable;
29538 this.allowSeeksWithinUnsafeLiveWindow = options.allowSeeksWithinUnsafeLiveWindow;
29539 this.liveRangeSafeTimeDelta = options.liveRangeSafeTimeDelta;
29540 this.media = options.media;
29541 this.consecutiveUpdates = 0;
29542 this.lastRecordedTime = null;
29543 this.timer_ = null;
29544 this.checkCurrentTimeTimeout_ = null;
29545 this.logger_ = logger('PlaybackWatcher');
29546 this.logger_('initialize');
29547
29548 var playHandler = function playHandler() {
29549 return _this.monitorCurrentTime_();
29550 };
29551
29552 var canPlayHandler = function canPlayHandler() {
29553 return _this.monitorCurrentTime_();
29554 };
29555
29556 var waitingHandler = function waitingHandler() {
29557 return _this.techWaiting_();
29558 };
29559
29560 var cancelTimerHandler = function cancelTimerHandler() {
29561 return _this.cancelTimer_();
29562 };
29563
29564 var fixesBadSeeksHandler = function fixesBadSeeksHandler() {
29565 return _this.fixesBadSeeks_();
29566 };
29567
29568 var mpc = this.masterPlaylistController_;
29569 var loaderTypes = ['main', 'subtitle', 'audio'];
29570 var loaderChecks = {};
29571 loaderTypes.forEach(function (type) {
29572 loaderChecks[type] = {
29573 reset: function reset() {
29574 return _this.resetSegmentDownloads_(type);
29575 },
29576 updateend: function updateend() {
29577 return _this.checkSegmentDownloads_(type);
29578 }
29579 };
29580 mpc[type + "SegmentLoader_"].on('appendsdone', loaderChecks[type].updateend); // If a rendition switch happens during a playback stall where the buffer
// isn't changing, we want to reset. We cannot assume that the new rendition
// will also be stalled until we have seen new appends.
29583
29584 mpc[type + "SegmentLoader_"].on('playlistupdate', loaderChecks[type].reset); // Playback stalls should not be detected right after seeking.
29585 // This prevents one segment playlists (single vtt or single segment content)
// from being detected as stalling, since the buffer will not change in those
// cases (the buffer covers the entire video duration).
29588
29589 _this.tech_.on(['seeked', 'seeking'], loaderChecks[type].reset);
29590 });
29591 this.tech_.on('seekablechanged', fixesBadSeeksHandler);
29592 this.tech_.on('waiting', waitingHandler);
29593 this.tech_.on(timerCancelEvents, cancelTimerHandler);
29594 this.tech_.on('canplay', canPlayHandler);
29595 /*
29596 An edge case exists that results in gaps not being skipped when they exist at the beginning of a stream. This case
29597 is surfaced in one of two ways:
29598 1) The `waiting` event is fired before the player has buffered content, making it impossible
29599 to find or skip the gap. The `waiting` event is followed by a `play` event. On first play
29600 we can check if playback is stalled due to a gap, and skip the gap if necessary.
2) A source with a gap at the beginning of the stream is loaded programmatically while the player
is in a playing state. To catch this case, it's important that our one-time play listener is set up
29603 even if the player is in a playing state
29604 */
29605
29606 this.tech_.one('play', playHandler); // Define the dispose function to clean up our events
29607
29608 this.dispose = function () {
29609 _this.logger_('dispose');
29610
29611 _this.tech_.off('seekablechanged', fixesBadSeeksHandler);
29612
29613 _this.tech_.off('waiting', waitingHandler);
29614
29615 _this.tech_.off(timerCancelEvents, cancelTimerHandler);
29616
29617 _this.tech_.off('canplay', canPlayHandler);
29618
29619 _this.tech_.off('play', playHandler);
29620
29621 loaderTypes.forEach(function (type) {
29622 mpc[type + "SegmentLoader_"].off('appendsdone', loaderChecks[type].updateend);
29623 mpc[type + "SegmentLoader_"].off('playlistupdate', loaderChecks[type].reset);
29624
29625 _this.tech_.off(['seeked', 'seeking'], loaderChecks[type].reset);
29626 });
29627
29628 if (_this.checkCurrentTimeTimeout_) {
29629 window__default['default'].clearTimeout(_this.checkCurrentTimeTimeout_);
29630 }
29631
29632 _this.cancelTimer_();
29633 };
29634 }
29635 /**
29636 * Periodically check current time to see if playback stopped
29637 *
29638 * @private
29639 */
29640
29641
29642 var _proto = PlaybackWatcher.prototype;
29643
29644 _proto.monitorCurrentTime_ = function monitorCurrentTime_() {
29645 this.checkCurrentTime_();
29646
29647 if (this.checkCurrentTimeTimeout_) {
29648 window__default['default'].clearTimeout(this.checkCurrentTimeTimeout_);
} // poll every 250ms, the interval WebKit uses for timeupdate (Firefox uses 15ms; 42ms would correspond to 24fps)
29650
29651
29652 this.checkCurrentTimeTimeout_ = window__default['default'].setTimeout(this.monitorCurrentTime_.bind(this), 250);
29653 }
29654 /**
29655 * Reset stalled download stats for a specific type of loader
29656 *
29657 * @param {string} type
29658 * The segment loader type to check.
29659 *
29660 * @listens SegmentLoader#playlistupdate
29661 * @listens Tech#seeking
29662 * @listens Tech#seeked
29663 */
29664 ;
29665
29666 _proto.resetSegmentDownloads_ = function resetSegmentDownloads_(type) {
29667 var loader = this.masterPlaylistController_[type + "SegmentLoader_"];
29668
29669 if (this[type + "StalledDownloads_"] > 0) {
29670 this.logger_("resetting possible stalled download count for " + type + " loader");
29671 }
29672
29673 this[type + "StalledDownloads_"] = 0;
29674 this[type + "Buffered_"] = loader.buffered_();
29675 }
29676 /**
29677 * Checks on every segment `appendsdone` to see
29678 * if segment appends are making progress. If they are not
29679 * and we are still downloading bytes. We blacklist the playlist.
29680 *
29681 * @param {string} type
29682 * The segment loader type to check.
29683 *
29684 * @listens SegmentLoader#appendsdone
29685 */
29686 ;
29687
29688 _proto.checkSegmentDownloads_ = function checkSegmentDownloads_(type) {
29689 var mpc = this.masterPlaylistController_;
29690 var loader = mpc[type + "SegmentLoader_"];
29691 var buffered = loader.buffered_();
29692 var isBufferedDifferent = isRangeDifferent(this[type + "Buffered_"], buffered);
this[type + "Buffered_"] = buffered; // if the buffered value for this loader changed, then appends are
// working (or another watcher is going to fix the issue), so reset
// the stalled download stats
29696
29697 if (isBufferedDifferent) {
29698 this.resetSegmentDownloads_(type);
29699 return;
29700 }
29701
29702 this[type + "StalledDownloads_"]++;
29703 this.logger_("found #" + this[type + "StalledDownloads_"] + " " + type + " appends that did not increase buffer (possible stalled download)", {
29704 playlistId: loader.playlist_ && loader.playlist_.id,
29705 buffered: timeRangesToArray(buffered)
29706 }); // after 10 possibly stalled appends with no reset, exclude
29707
29708 if (this[type + "StalledDownloads_"] < 10) {
29709 return;
29710 }
29711
29712 this.logger_(type + " loader stalled download exclusion");
29713 this.resetSegmentDownloads_(type);
29714 this.tech_.trigger({
29715 type: 'usage',
29716 name: "vhs-" + type + "-download-exclusion"
29717 });
29718
29719 if (type === 'subtitle') {
29720 return;
29721 } // TODO: should we exclude audio tracks rather than main tracks
29722 // when type is audio?
29723
29724
29725 mpc.blacklistCurrentPlaylist({
29726 message: "Excessive " + type + " segment downloading detected."
29727 }, Infinity);
29728 }
29729 /**
29730 * The purpose of this function is to emulate the "waiting" event on
29731 * browsers that do not emit it when they are waiting for more
29732 * data to continue playback
29733 *
29734 * @private
29735 */
29736 ;
29737
29738 _proto.checkCurrentTime_ = function checkCurrentTime_() {
29739 if (this.tech_.seeking() && this.fixesBadSeeks_()) {
29740 this.consecutiveUpdates = 0;
29741 this.lastRecordedTime = this.tech_.currentTime();
29742 return;
29743 }
29744
29745 if (this.tech_.paused() || this.tech_.seeking()) {
29746 return;
29747 }
29748
29749 var currentTime = this.tech_.currentTime();
29750 var buffered = this.tech_.buffered();
29751
29752 if (this.lastRecordedTime === currentTime && (!buffered.length || currentTime + SAFE_TIME_DELTA >= buffered.end(buffered.length - 1))) {
29753 // If current time is at the end of the final buffered region, then any playback
29754 // stall is most likely caused by buffering in a low bandwidth environment. The tech
// should fire a `waiting` event in this scenario, but due to browser and tech
// inconsistencies it may not. Calling `techWaiting_` here allows us to simulate
29757 // responding to a native `waiting` event when the tech fails to emit one.
29758 return this.techWaiting_();
29759 }
29760
29761 if (this.consecutiveUpdates >= 5 && currentTime === this.lastRecordedTime) {
29762 this.consecutiveUpdates++;
29763 this.waiting_();
29764 } else if (currentTime === this.lastRecordedTime) {
29765 this.consecutiveUpdates++;
29766 } else {
29767 this.consecutiveUpdates = 0;
29768 this.lastRecordedTime = currentTime;
29769 }
29770 }
29771 /**
29772 * Cancels any pending timers and resets the 'timeupdate' mechanism
29773 * designed to detect that we are stalled
29774 *
29775 * @private
29776 */
29777 ;
29778
29779 _proto.cancelTimer_ = function cancelTimer_() {
29780 this.consecutiveUpdates = 0;
29781
29782 if (this.timer_) {
29783 this.logger_('cancelTimer_');
29784 clearTimeout(this.timer_);
29785 }
29786
29787 this.timer_ = null;
29788 }
29789 /**
29790 * Fixes situations where there's a bad seek
29791 *
29792 * @return {boolean} whether an action was taken to fix the seek
29793 * @private
29794 */
29795 ;
29796
29797 _proto.fixesBadSeeks_ = function fixesBadSeeks_() {
29798 var seeking = this.tech_.seeking();
29799
29800 if (!seeking) {
29801 return false;
29802 }
29803
29804 var seekable = this.seekable();
29805 var currentTime = this.tech_.currentTime();
29806 var isAfterSeekableRange = this.afterSeekableWindow_(seekable, currentTime, this.media(), this.allowSeeksWithinUnsafeLiveWindow);
29807 var seekTo;
29808
29809 if (isAfterSeekableRange) {
29810 var seekableEnd = seekable.end(seekable.length - 1); // sync to live point (if VOD, our seekable was updated and we're simply adjusting)
29811
29812 seekTo = seekableEnd;
29813 }
29814
29815 if (this.beforeSeekableWindow_(seekable, currentTime)) {
29816 var seekableStart = seekable.start(0); // sync to the beginning of the live window
29817 // provide a buffer of .1 seconds to handle rounding/imprecise numbers
29818
29819 seekTo = seekableStart + ( // if the playlist is too short and the seekable range is an exact time (can
29820 // happen in live with a 3 segment playlist), then don't use a time delta
29821 seekableStart === seekable.end(0) ? 0 : SAFE_TIME_DELTA);
29822 }
29823
29824 if (typeof seekTo !== 'undefined') {
29825 this.logger_("Trying to seek outside of seekable at time " + currentTime + " with " + ("seekable range " + printableRange(seekable) + ". Seeking to ") + (seekTo + "."));
29826 this.tech_.setCurrentTime(seekTo);
29827 return true;
29828 }
29829
29830 var buffered = this.tech_.buffered();
29831
29832 if (closeToBufferedContent({
29833 buffered: buffered,
29834 targetDuration: this.media().targetDuration,
29835 currentTime: currentTime
29836 })) {
29837 seekTo = buffered.start(0) + SAFE_TIME_DELTA;
29838 this.logger_("Buffered region starts (" + buffered.start(0) + ") " + (" just beyond seek point (" + currentTime + "). Seeking to " + seekTo + "."));
29839 this.tech_.setCurrentTime(seekTo);
29840 return true;
29841 }
29842
29843 return false;
29844 }
29845 /**
29846 * Handler for situations when we determine the player is waiting.
29847 *
29848 * @private
29849 */
29850 ;
29851
29852 _proto.waiting_ = function waiting_() {
29853 if (this.techWaiting_()) {
29854 return;
29855 } // All tech waiting checks failed. Use last resort correction
29856
29857
29858 var currentTime = this.tech_.currentTime();
29859 var buffered = this.tech_.buffered();
29860 var currentRange = findRange(buffered, currentTime); // Sometimes the player can stall for unknown reasons within a contiguous buffered
29861 // region with no indication that anything is amiss (seen in Firefox). Seeking to
29862 // currentTime is usually enough to kickstart the player. This checks that the player
29863 // is currently within a buffered region before attempting a corrective seek.
29864 // Chrome does not appear to continue `timeupdate` events after a `waiting` event
29865 // until there is ~3 seconds of forward buffer available. PlaybackWatcher should also
29866 // make sure there is ~3 seconds of forward buffer before taking any corrective action
29867 // to avoid triggering an `unknownwaiting` event when the network is slow.
29868
29869 if (currentRange.length && currentTime + 3 <= currentRange.end(0)) {
29870 this.cancelTimer_();
29871 this.tech_.setCurrentTime(currentTime);
29872 this.logger_("Stopped at " + currentTime + " while inside a buffered region " + ("[" + currentRange.start(0) + " -> " + currentRange.end(0) + "]. Attempting to resume ") + 'playback by seeking to the current time.'); // unknown waiting corrections may be useful for monitoring QoS
29873
29874 this.tech_.trigger({
29875 type: 'usage',
29876 name: 'vhs-unknown-waiting'
29877 });
29878 this.tech_.trigger({
29879 type: 'usage',
29880 name: 'hls-unknown-waiting'
29881 });
29882 return;
29883 }
29884 }
29885 /**
29886 * Handler for situations when the tech fires a `waiting` event
29887 *
29888 * @return {boolean}
29889 * True if a corrective action was taken (or no action was needed). False if
29890 * no checks passed
29891 * @private
29892 */
29893 ;
29894
29895 _proto.techWaiting_ = function techWaiting_() {
29896 var seekable = this.seekable();
29897 var currentTime = this.tech_.currentTime();
29898
29899 if (this.tech_.seeking() && this.fixesBadSeeks_()) {
29900 // Tech was seeking and the bad seek was fixed; no further action needed
29901 return true;
29902 }
29903
29904 if (this.tech_.seeking() || this.timer_ !== null) {
29905 // Tech is seeking or already waiting on another action, no action needed
29906 return true;
29907 }
29908
29909 if (this.beforeSeekableWindow_(seekable, currentTime)) {
29910 var livePoint = seekable.end(seekable.length - 1);
29911 this.logger_("Fell out of live window at time " + currentTime + ". Seeking to " + ("live point (seekable end) " + livePoint));
29912 this.cancelTimer_();
29913 this.tech_.setCurrentTime(livePoint); // live window resyncs may be useful for monitoring QoS
29914
29915 this.tech_.trigger({
29916 type: 'usage',
29917 name: 'vhs-live-resync'
29918 });
29919 this.tech_.trigger({
29920 type: 'usage',
29921 name: 'hls-live-resync'
29922 });
29923 return true;
29924 }
29925
29926 var sourceUpdater = this.tech_.vhs.masterPlaylistController_.sourceUpdater_;
29927 var buffered = this.tech_.buffered();
29928 var videoUnderflow = this.videoUnderflow_({
29929 audioBuffered: sourceUpdater.audioBuffered(),
29930 videoBuffered: sourceUpdater.videoBuffered(),
29931 currentTime: currentTime
29932 });
29933
29934 if (videoUnderflow) {
29935 // Even though the video underflowed and was stuck in a gap, the audio overplayed
29936 // the gap, leading currentTime into a buffered range. Seeking to currentTime
29937 // allows the video to catch up to the audio position without losing any audio
29938 // (only suffering ~3 seconds of frozen video and a pause in audio playback).
29939 this.cancelTimer_();
29940 this.tech_.setCurrentTime(currentTime); // video underflow may be useful for monitoring QoS
29941
29942 this.tech_.trigger({
29943 type: 'usage',
29944 name: 'vhs-video-underflow'
29945 });
29946 this.tech_.trigger({
29947 type: 'usage',
29948 name: 'hls-video-underflow'
29949 });
29950 return true;
29951 }
29952
29953 var nextRange = findNextRange(buffered, currentTime); // check for gap
29954
29955 if (nextRange.length > 0) {
29956 var difference = nextRange.start(0) - currentTime;
29957 this.logger_("Stopped at " + currentTime + ", setting timer for " + difference + ", seeking " + ("to " + nextRange.start(0)));
29958 this.cancelTimer_();
29959 this.timer_ = setTimeout(this.skipTheGap_.bind(this), difference * 1000, currentTime);
29960 return true;
29961 } // All checks failed. Returning false to indicate failure to correct waiting
29962
29963
29964 return false;
29965 };
29966
29967 _proto.afterSeekableWindow_ = function afterSeekableWindow_(seekable, currentTime, playlist, allowSeeksWithinUnsafeLiveWindow) {
29968 if (allowSeeksWithinUnsafeLiveWindow === void 0) {
29969 allowSeeksWithinUnsafeLiveWindow = false;
29970 }
29971
29972 if (!seekable.length) {
29973 // we can't make a solid case if there's no seekable, default to false
29974 return false;
29975 }
29976
29977 var allowedEnd = seekable.end(seekable.length - 1) + SAFE_TIME_DELTA;
29978 var isLive = !playlist.endList;
29979
29980 if (isLive && allowSeeksWithinUnsafeLiveWindow) {
29981 allowedEnd = seekable.end(seekable.length - 1) + playlist.targetDuration * 3;
29982 }
29983
29984 if (currentTime > allowedEnd) {
29985 return true;
29986 }
29987
29988 return false;
29989 };
29990
29991 _proto.beforeSeekableWindow_ = function beforeSeekableWindow_(seekable, currentTime) {
29992 if (seekable.length && // can't fall before 0 and 0 seekable start identifies VOD stream
29993 seekable.start(0) > 0 && currentTime < seekable.start(0) - this.liveRangeSafeTimeDelta) {
29994 return true;
29995 }
29996
29997 return false;
29998 };
29999
30000 _proto.videoUnderflow_ = function videoUnderflow_(_ref2) {
30001 var videoBuffered = _ref2.videoBuffered,
30002 audioBuffered = _ref2.audioBuffered,
30003 currentTime = _ref2.currentTime;
30004
30005 // audio only content will not have video underflow :)
30006 if (!videoBuffered) {
30007 return;
30008 }
30009
30010 var gap; // find a gap in demuxed content.
30011
30012 if (videoBuffered.length && audioBuffered.length) {
30013 // in Chrome audio will continue to play for ~3s when we run out of video
30014 // so we have to check that the video buffer did have some buffer in the
30015 // past.
30016 var lastVideoRange = findRange(videoBuffered, currentTime - 3);
30017 var videoRange = findRange(videoBuffered, currentTime);
30018 var audioRange = findRange(audioBuffered, currentTime);
30019
30020 if (audioRange.length && !videoRange.length && lastVideoRange.length) {
30021 gap = {
30022 start: lastVideoRange.end(0),
30023 end: audioRange.end(0)
30024 };
30025 } // find a gap in muxed content.
30026
30027 } else {
30028 var nextRange = findNextRange(videoBuffered, currentTime); // Even if there is no available next range, there is still a possibility we are
30029 // stuck in a gap due to video underflow.
30030
30031 if (!nextRange.length) {
30032 gap = this.gapFromVideoUnderflow_(videoBuffered, currentTime);
30033 }
30034 }
30035
30036 if (gap) {
30037 this.logger_("Encountered a gap in video from " + gap.start + " to " + gap.end + ". " + ("Seeking to current time " + currentTime));
30038 return true;
30039 }
30040
30041 return false;
30042 }
30043 /**
30044 * Timer callback. If playback still has not proceeded, then we seek
30045 * to the start of the next buffered region.
30046 *
30047 * @private
30048 */
30049 ;
30050
30051 _proto.skipTheGap_ = function skipTheGap_(scheduledCurrentTime) {
30052 var buffered = this.tech_.buffered();
30053 var currentTime = this.tech_.currentTime();
30054 var nextRange = findNextRange(buffered, currentTime);
30055 this.cancelTimer_();
30056
30057 if (nextRange.length === 0 || currentTime !== scheduledCurrentTime) {
30058 return;
30059 }
30060
30061 this.logger_('skipTheGap_:', 'currentTime:', currentTime, 'scheduled currentTime:', scheduledCurrentTime, 'nextRange start:', nextRange.start(0)); // only seek if we still have not played
30062
30063 this.tech_.setCurrentTime(nextRange.start(0) + TIME_FUDGE_FACTOR);
30064 this.tech_.trigger({
30065 type: 'usage',
30066 name: 'vhs-gap-skip'
30067 });
30068 this.tech_.trigger({
30069 type: 'usage',
30070 name: 'hls-gap-skip'
30071 });
30072 };
30073
30074 _proto.gapFromVideoUnderflow_ = function gapFromVideoUnderflow_(buffered, currentTime) {
30075 // At least in Chrome, if there is a gap in the video buffer, the audio will continue
30076 // playing for ~3 seconds after the video gap starts. This is done to account for
30077 // video buffer underflow/underrun (note that this is not done when there is audio
30078 // buffer underflow/underrun -- in that case the video will stop as soon as it
30079 // encounters the gap, as audio stalls are more noticeable/jarring to a user than
30080 // video stalls). The player's time will reflect the playthrough of audio, so the
30081 // time will appear as if we are in a buffered region, even if we are stuck in a
30082 // "gap."
30083 //
30084 // Example:
30085 // video buffer: 0 => 10.1, 10.2 => 20
30086 // audio buffer: 0 => 20
30087 // overall buffer: 0 => 10.1, 10.2 => 20
30088 // current time: 13
30089 //
30090 // Chrome's video froze at 10 seconds, where the video buffer encountered the gap,
30091 // however, the audio continued playing until it reached ~3 seconds past the gap
30092 // (13 seconds), at which point it stops as well. Since current time is past the
30093 // gap, findNextRange will return no ranges.
30094 //
30095 // To check for this issue, we see if there is a gap that starts somewhere within
30096 // a 3 second range (3 seconds +/- 1 second) back from our current time.
30097 var gaps = findGaps(buffered);
30098
30099 for (var i = 0; i < gaps.length; i++) {
30100 var start = gaps.start(i);
30101 var end = gaps.end(i); // use the gap only if it starts between 2 and 4 seconds back
30102
30103 if (currentTime - start < 4 && currentTime - start > 2) {
30104 return {
30105 start: start,
30106 end: end
30107 };
30108 }
30109 }
30110
30111 return null;
30112 };
30113
30114 return PlaybackWatcher;
30115 }();
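  /*
   * The watcher above reports each corrective action as a `usage` event on the
   * tech. A QoS monitor could aggregate them like this (sketch; `recordMetric`
   * is a hypothetical analytics hook):
   *
   *   player.tech({ IWillNotUseThisInPlugins: true }).on('usage', function (event) {
   *     if (/^vhs-(gap-skip|unknown-waiting|live-resync|video-underflow)$/.test(event.name)) {
   *       recordMetric(event.name);
   *     }
   *   });
   */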
30116
30117 var defaultOptions = {
30118 errorInterval: 30,
30119 getSource: function getSource(next) {
30120 var tech = this.tech({
30121 IWillNotUseThisInPlugins: true
30122 });
30123 var sourceObj = tech.currentSource_ || this.currentSource();
30124 return next(sourceObj);
30125 }
30126 };
30127 /**
30128 * Main entry point for the plugin
30129 *
30130 * @param {Player} player a reference to a videojs Player instance
30131 * @param {Object} [options] an object with plugin options
30132 * @private
30133 */
30134
30135 var initPlugin = function initPlugin(player, options) {
30136 var lastCalled = 0;
30137 var seekTo = 0;
30138 var localOptions = videojs__default['default'].mergeOptions(defaultOptions, options);
30139 player.ready(function () {
30140 player.trigger({
30141 type: 'usage',
30142 name: 'vhs-error-reload-initialized'
30143 });
30144 player.trigger({
30145 type: 'usage',
30146 name: 'hls-error-reload-initialized'
30147 });
30148 });
30149 /**
30150 * Player modifications to perform that must wait until `loadedmetadata`
30151 * has been triggered
30152 *
30153 * @private
30154 */
30155
30156 var loadedMetadataHandler = function loadedMetadataHandler() {
30157 if (seekTo) {
30158 player.currentTime(seekTo);
30159 }
30160 };
30161 /**
30162 * Set the source on the player element, play, and seek if necessary
30163 *
30164 * @param {Object} sourceObj An object specifying the source url and mime-type to play
30165 * @private
30166 */
30167
30168
30169 var setSource = function setSource(sourceObj) {
30170 if (sourceObj === null || sourceObj === undefined) {
30171 return;
30172 }
30173
30174 seekTo = player.duration() !== Infinity && player.currentTime() || 0;
30175 player.one('loadedmetadata', loadedMetadataHandler);
30176 player.src(sourceObj);
30177 player.trigger({
30178 type: 'usage',
30179 name: 'vhs-error-reload'
30180 });
30181 player.trigger({
30182 type: 'usage',
30183 name: 'hls-error-reload'
30184 });
30185 player.play();
30186 };
30187 /**
30188 * Attempt to get a source from either the built-in getSource function
30189 * or a custom function provided via the options
30190 *
30191 * @private
30192 */
30193
30194
30195 var errorHandler = function errorHandler() {
30196 // Do not attempt to reload the source if a source-reload occurred before
30197 // 'errorInterval' time has elapsed since the last source-reload
30198 if (Date.now() - lastCalled < localOptions.errorInterval * 1000) {
30199 player.trigger({
30200 type: 'usage',
30201 name: 'vhs-error-reload-canceled'
30202 });
30203 player.trigger({
30204 type: 'usage',
30205 name: 'hls-error-reload-canceled'
30206 });
30207 return;
30208 }
30209
30210 if (!localOptions.getSource || typeof localOptions.getSource !== 'function') {
30211 videojs__default['default'].log.error('ERROR: reloadSourceOnError - The option getSource must be a function!');
30212 return;
30213 }
30214
30215 lastCalled = Date.now();
30216 return localOptions.getSource.call(player, setSource);
30217 };
30218 /**
30219 * Unbind any event handlers that were bound by the plugin
30220 *
30221 * @private
30222 */
30223
30224
30225 var cleanupEvents = function cleanupEvents() {
30226 player.off('loadedmetadata', loadedMetadataHandler);
30227 player.off('error', errorHandler);
30228 player.off('dispose', cleanupEvents);
30229 };
30230 /**
30231 * Cleanup before re-initializing the plugin
30232 *
30233 * @param {Object} [newOptions] an object with plugin options
30234 * @private
30235 */
30236
30237
30238 var reinitPlugin = function reinitPlugin(newOptions) {
30239 cleanupEvents();
30240 initPlugin(player, newOptions);
30241 };
30242
30243 player.on('error', errorHandler);
30244 player.on('dispose', cleanupEvents); // Overwrite the plugin function so that we can correctly cleanup before
30245 // initializing the plugin
30246
30247 player.reloadSourceOnError = reinitPlugin;
30248 };
30249 /**
30250 * Reload the source when an error is detected as long as there
30251 * wasn't an error previously within the last 30 seconds
30252 *
30253 * @param {Object} [options] an object with plugin options
30254 */
30255
30256
30257 var reloadSourceOnError = function reloadSourceOnError(options) {
30258 initPlugin(this, options);
30259 };
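  /*
   * Usage sketch for the plugin above (assumes a `player` created via
   * videojs(); the URL is a placeholder):
   *
   *   player.reloadSourceOnError({
   *     // don't reload more than once every 10 seconds
   *     errorInterval: 10,
   *     // optionally supply the next source instead of reusing the current one
   *     getSource: function (next) {
   *       next({ src: 'https://example.com/master.m3u8', type: 'application/x-mpegURL' });
   *     }
   *   });
   */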
30260
30261 var version$4 = "2.7.0";
30262
30263 var version$3 = "5.11.0";
30264
30265 var version$2 = "0.16.0";
30266
30267 var version$1 = "4.6.0";
30268
30269 var version = "3.1.2";
30270
30271 var Vhs = {
30272 PlaylistLoader: PlaylistLoader,
30273 Playlist: Playlist,
30274 utils: utils$1,
30275 STANDARD_PLAYLIST_SELECTOR: lastBandwidthSelector,
30276 INITIAL_PLAYLIST_SELECTOR: lowestBitrateCompatibleVariantSelector,
30277 lastBandwidthSelector: lastBandwidthSelector,
30278 movingAverageBandwidthSelector: movingAverageBandwidthSelector,
30279 comparePlaylistBandwidth: comparePlaylistBandwidth,
30280 comparePlaylistResolution: comparePlaylistResolution,
30281 xhr: xhrFactory()
30282 }; // Define getter/setters for config properties
30283
30284 Object.keys(Config).forEach(function (prop) {
30285 Object.defineProperty(Vhs, prop, {
30286 get: function get() {
30287 videojs__default['default'].log.warn("using Vhs." + prop + " is UNSAFE be sure you know what you are doing");
30288 return Config[prop];
30289 },
30290 set: function set(value) {
30291 videojs__default['default'].log.warn("using Vhs." + prop + " is UNSAFE; be sure you know what you are doing");
30292
30293 if (typeof value !== 'number' || value < 0) {
30294 videojs__default['default'].log.warn("value of Vhs." + prop + " must be greater than or equal to 0");
30295 return;
30296 }
30297
30298 Config[prop] = value;
30299 }
30300 });
30301 });
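  /*
   * Each Config tunable is now reachable (with the warning above) directly on
   * the Vhs object. For example (sketch; GOAL_BUFFER_LENGTH is one of the
   * standard Config keys, assumed here):
   *
   *   videojs.Vhs.GOAL_BUFFER_LENGTH = 60; // try to keep a minute buffered ahead
   */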
30302 var LOCAL_STORAGE_KEY = 'videojs-vhs';
30303 /**
30304 * Updates the selectedIndex of the QualityLevelList when a mediachange happens in vhs.
30305 *
30306 * @param {QualityLevelList} qualityLevels The QualityLevelList to update.
30307 * @param {PlaylistLoader} playlistLoader PlaylistLoader containing the new media info.
30308 * @function handleVhsMediaChange
30309 */
30310
30311 var handleVhsMediaChange = function handleVhsMediaChange(qualityLevels, playlistLoader) {
30312 var newPlaylist = playlistLoader.media();
30313 var selectedIndex = -1;
30314
30315 for (var i = 0; i < qualityLevels.length; i++) {
30316 if (qualityLevels[i].id === newPlaylist.id) {
30317 selectedIndex = i;
30318 break;
30319 }
30320 }
30321
30322 qualityLevels.selectedIndex_ = selectedIndex;
30323 qualityLevels.trigger({
30324 selectedIndex: selectedIndex,
30325 type: 'change'
30326 });
30327 };
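  /*
   * Together with handleVhsLoadedMetadata below, this keeps the
   * videojs-contrib-quality-levels list in sync with the active rendition.
   * Application code consumes the list roughly like this (sketch; assumes the
   * quality-levels plugin is loaded):
   *
   *   var levels = player.qualityLevels();
   *   levels.on('change', function () {
   *     console.log('now playing rendition', levels[levels.selectedIndex].id);
   *   });
   */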
30328 /**
30329 * Adds quality levels to list once playlist metadata is available
30330 *
30331 * @param {QualityLevelList} qualityLevels The QualityLevelList to attach events to.
30332 * @param {Object} vhs Vhs object to listen to for media events.
30333 * @function handleVhsLoadedMetadata
30334 */
30335
30336
30337 var handleVhsLoadedMetadata = function handleVhsLoadedMetadata(qualityLevels, vhs) {
30338 vhs.representations().forEach(function (rep) {
30339 qualityLevels.addQualityLevel(rep);
30340 });
30341 handleVhsMediaChange(qualityLevels, vhs.playlists);
30342 }; // HLS is a source handler, not a tech. Make sure attempts to use it
30343 // as one do not cause exceptions.
30344
30345
30346 Vhs.canPlaySource = function () {
30347 return videojs__default['default'].log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
30348 };
30349
30350 var emeKeySystems = function emeKeySystems(keySystemOptions, mainPlaylist, audioPlaylist) {
30351 if (!keySystemOptions) {
30352 return keySystemOptions;
30353 }
30354
30355 var codecs = {};
30356
30357 if (mainPlaylist && mainPlaylist.attributes && mainPlaylist.attributes.CODECS) {
30358 codecs = unwrapCodecList(parseCodecs(mainPlaylist.attributes.CODECS));
30359 }
30360
30361 if (audioPlaylist && audioPlaylist.attributes && audioPlaylist.attributes.CODECS) {
30362 codecs.audio = audioPlaylist.attributes.CODECS;
30363 }
30364
30365 var videoContentType = getMimeForCodec(codecs.video);
30366 var audioContentType = getMimeForCodec(codecs.audio); // upsert the content types based on the selected playlist
30367
30368 var keySystemContentTypes = {};
30369
30370 for (var keySystem in keySystemOptions) {
30371 keySystemContentTypes[keySystem] = {};
30372
30373 if (audioContentType) {
30374 keySystemContentTypes[keySystem].audioContentType = audioContentType;
30375 }
30376
30377 if (videoContentType) {
30378 keySystemContentTypes[keySystem].videoContentType = videoContentType;
30379 } // Default to using the video playlist's PSSH even though they may be different, as
30380 // videojs-contrib-eme will only accept one in the options.
30381 //
30382 // This shouldn't be an issue for most cases as early initialization will handle all
30383 // unique PSSH values, and if they aren't, then encrypted events should have the
30384 // specific information needed for the unique license.
30385
30386
30387 if (mainPlaylist.contentProtection && mainPlaylist.contentProtection[keySystem] && mainPlaylist.contentProtection[keySystem].pssh) {
30388 keySystemContentTypes[keySystem].pssh = mainPlaylist.contentProtection[keySystem].pssh;
30389 } // videojs-contrib-eme accepts the option of specifying: 'com.some.cdm': 'url'
30390 // so we need to prevent overwriting the URL entirely
30391
30392
30393 if (typeof keySystemOptions[keySystem] === 'string') {
30394 keySystemContentTypes[keySystem].url = keySystemOptions[keySystem];
30395 }
30396 }
30397
30398 return videojs__default['default'].mergeOptions(keySystemOptions, keySystemContentTypes);
30399 };
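  /*
   * A typical input to emeKeySystems comes from the keySystems property on the
   * source (sketch; the license URL is a placeholder):
   *
   *   player.src({
   *     src: 'https://example.com/master.m3u8',
   *     type: 'application/x-mpegURL',
   *     keySystems: {
   *       'com.widevine.alpha': 'https://license.example.com/widevine'
   *     }
   *   });
   *
   * For that input each key system entry gains audioContentType and
   * videoContentType strings derived from the playlist CODECS, and the string
   * form above is preserved as a `url` property.
   */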
30400 /**
30401 * @typedef {Object} KeySystems
30402 *
30403 * keySystems configuration for https://github.com/videojs/videojs-contrib-eme
30404 * Note: not all options are listed here.
30405 *
30406 * @property {Uint8Array} [pssh]
30407 * Protection System Specific Header
30408 */
30409
30410 /**
30411 * Goes through all the playlists and collects an array of KeySystems options objects
30412 * containing each playlist's keySystems and their pssh values, if available.
30413 *
30414 * @param {Object[]} playlists
30415 * The playlists to look through
30416 * @param {string[]} keySystems
30417 * The keySystems to collect pssh values for
30418 *
30419 * @return {KeySystems[]}
30420 * An array of KeySystems objects containing available key systems and their
30421 * pssh values
30422 */
30423
30424
30425 var getAllPsshKeySystemsOptions = function getAllPsshKeySystemsOptions(playlists, keySystems) {
30426 return playlists.reduce(function (keySystemsArr, playlist) {
30427 if (!playlist.contentProtection) {
30428 return keySystemsArr;
30429 }
30430
30431 var keySystemsOptions = keySystems.reduce(function (keySystemsObj, keySystem) {
30432 var keySystemOptions = playlist.contentProtection[keySystem];
30433
30434 if (keySystemOptions && keySystemOptions.pssh) {
30435 keySystemsObj[keySystem] = {
30436 pssh: keySystemOptions.pssh
30437 };
30438 }
30439
30440 return keySystemsObj;
30441 }, {});
30442
30443 if (Object.keys(keySystemsOptions).length) {
30444 keySystemsArr.push(keySystemsOptions);
30445 }
30446
30447 return keySystemsArr;
30448 }, []);
30449 };
30450 /**
30451 * Returns a promise that waits for the
30452 * [eme plugin](https://github.com/videojs/videojs-contrib-eme) to create a key session.
30453 *
30454 * Works around https://bugs.chromium.org/p/chromium/issues/detail?id=895449 in non-IE11
30455 * browsers.
30456 *
30457 * As per the above ticket, this is particularly important for Chrome, where, if
30458 * unencrypted content is appended before encrypted content and the key session has not
30459 * been created, a MEDIA_ERR_DECODE will be thrown once the encrypted content is reached
30460 * during playback.
30461 *
30462 * @param {Object} player
30463 * The player instance
30464 * @param {Object[]} sourceKeySystems
30465 * The key systems options from the player source
30466 * @param {Object} [audioMedia]
30467 * The active audio media playlist (optional)
30468 * @param {Object[]} mainPlaylists
30469 * The playlists found on the master playlist object
30470 *
30471 * @return {Object}
30472 * Promise that resolves when the key session has been created
30473 */
30474
30475
30476 var waitForKeySessionCreation = function waitForKeySessionCreation(_ref) {
30477 var player = _ref.player,
30478 sourceKeySystems = _ref.sourceKeySystems,
30479 audioMedia = _ref.audioMedia,
30480 mainPlaylists = _ref.mainPlaylists;
30481
30482 if (!player.eme.initializeMediaKeys) {
30483 return Promise.resolve();
30484 } // TODO should all audio PSSH values be initialized for DRM?
30485 //
30486 // All unique video rendition pssh values are initialized for DRM, but here only
30487 // the initial audio playlist license is initialized. In theory, an encrypted
30488 // event should be fired if the user switches to an alternative audio playlist
30489 // where a license is required, but this case hasn't yet been tested. In addition, there
30490 // may be many alternate audio playlists unlikely to be used (e.g., multiple different
30491 // languages).
30492
30493
30494 var playlists = audioMedia ? mainPlaylists.concat([audioMedia]) : mainPlaylists;
30495 var keySystemsOptionsArr = getAllPsshKeySystemsOptions(playlists, Object.keys(sourceKeySystems));
30496 var initializationFinishedPromises = [];
30497 var keySessionCreatedPromises = []; // Since PSSH values are interpreted as initData, EME will dedupe any duplicates. The
30498 // only place where it should not be deduped is for ms-prefixed APIs, but the early
30499 // return for IE11 above, and the existence of modern EME APIs in addition to
30500 // ms-prefixed APIs on Edge should prevent this from being a concern.
30501 // initializeMediaKeys also won't use the webkit-prefixed APIs.
30502
30503 keySystemsOptionsArr.forEach(function (keySystemsOptions) {
30504 keySessionCreatedPromises.push(new Promise(function (resolve, reject) {
30505 player.tech_.one('keysessioncreated', resolve);
30506 }));
30507 initializationFinishedPromises.push(new Promise(function (resolve, reject) {
30508 player.eme.initializeMediaKeys({
30509 keySystems: keySystemsOptions
30510 }, function (err) {
30511 if (err) {
30512 reject(err);
30513 return;
30514 }
30515
30516 resolve();
30517 });
30518 }));
30519 }); // The reasons Promise.race is chosen over Promise.any:
30520 //
30521 // * Promise.any is only available in Safari 14+.
30522 // * None of these promises are expected to reject. If they do reject, it might be
30523 // better here for the race to surface the rejection, rather than mask it by using
30524 // Promise.any.
30525
30526 return Promise.race([// If a session was previously created, these will all finish resolving without
30527 // creating a new session, otherwise it will take until the end of all license
30528 // requests, which is why the key session check is used (to make setup much faster).
30529 Promise.all(initializationFinishedPromises), // Once a single session is created, the browser knows DRM will be used.
30530 Promise.race(keySessionCreatedPromises)]);
30531 };
30532 /**
30533 * If the [eme](https://github.com/videojs/videojs-contrib-eme) plugin is available, and
30534 * there are keySystems on the source, sets up source options to prepare the source for
30535 * eme.
30536 *
30537 * @param {Object} player
30538 * The player instance
30539 * @param {Object[]} sourceKeySystems
30540 * The key systems options from the player source
30541 * @param {Object} media
30542 * The active media playlist
30543 * @param {Object} [audioMedia]
30544 * The active audio media playlist (optional)
30545 *
30546 * @return {boolean}
30547 * Whether or not options were configured and EME is available
30548 */
30549
30550 var setupEmeOptions = function setupEmeOptions(_ref2) {
30551 var player = _ref2.player,
30552 sourceKeySystems = _ref2.sourceKeySystems,
30553 media = _ref2.media,
30554 audioMedia = _ref2.audioMedia;
30555 var sourceOptions = emeKeySystems(sourceKeySystems, media, audioMedia);
30556
30557 if (!sourceOptions) {
30558 return false;
30559 }
30560
30561 player.currentSource().keySystems = sourceOptions; // eme handles the rest of the setup, so if it is missing
30562 // do nothing.
30563
30564 if (sourceOptions && !player.eme) {
30565 videojs__default['default'].log.warn('DRM encrypted source cannot be decrypted without a DRM plugin');
30566 return false;
30567 }
30568
30569 return true;
30570 };
30571
30572 var getVhsLocalStorage = function getVhsLocalStorage() {
30573 if (!window__default['default'].localStorage) {
30574 return null;
30575 }
30576
30577 var storedObject = window__default['default'].localStorage.getItem(LOCAL_STORAGE_KEY);
30578
30579 if (!storedObject) {
30580 return null;
30581 }
30582
30583 try {
30584 return JSON.parse(storedObject);
30585 } catch (e) {
30586 // someone may have tampered with the value
30587 return null;
30588 }
30589 };
30590
30591 var updateVhsLocalStorage = function updateVhsLocalStorage(options) {
30592 if (!window__default['default'].localStorage) {
30593 return false;
30594 }
30595
30596 var objectToStore = getVhsLocalStorage();
30597 objectToStore = objectToStore ? videojs__default['default'].mergeOptions(objectToStore, options) : options;
30598
30599 try {
30600 window__default['default'].localStorage.setItem(LOCAL_STORAGE_KEY, JSON.stringify(objectToStore));
30601 } catch (e) {
30602 // Throws if storage is full (e.g., always on iOS 5+ Safari private mode, where
30603 // storage is set to 0).
30604 // https://developer.mozilla.org/en-US/docs/Web/API/Storage/setItem#Exceptions
30605 // No need to perform any operation.
30606 return false;
30607 }
30608
30609 return objectToStore;
30610 };
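  /*
   * The persisted shape is a small JSON blob under the 'videojs-vhs' key,
   * e.g. {"bandwidth":5000000,"throughput":20000000} (illustrative numbers).
   * It can be inspected directly (sketch):
   *
   *   JSON.parse(window.localStorage.getItem('videojs-vhs'));
   */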
30611 /**
30612 * Parses VHS-supported media types from data URIs. See
30613 * https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs
30614 * for information on data URIs.
30615 *
30616 * @param {string} dataUri
30617 * The data URI
30618 *
30619 * @return {string|Object}
30620 * The parsed object/string, or the original string if no supported media type
30621 * was found
30622 */
30623
30624
30625 var expandDataUri = function expandDataUri(dataUri) {
30626 if (dataUri.toLowerCase().indexOf('data:application/vnd.videojs.vhs+json,') === 0) {
30627 return JSON.parse(dataUri.substring(dataUri.indexOf(',') + 1));
30628 } // no known case for this data URI, return the string as-is
30629
30630
30631 return dataUri;
30632 };
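  /*
   * expandDataUri backs the experimental "VHS JSON" source type: a manifest
   * object can be passed inline instead of by URL (sketch; manifestObject is a
   * stand-in for a parsed manifest):
   *
   *   player.src({
   *     src: 'data:application/vnd.videojs.vhs+json,' + JSON.stringify(manifestObject),
   *     type: 'application/vnd.videojs.vhs+json'
   *   });
   *
   * Everything after the first comma is parsed as JSON; any other data URI is
   * returned as-is.
   */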
30633 /**
30634 * Whether the browser has built-in HLS support.
30635 */
30636
30637
30638 Vhs.supportsNativeHls = function () {
30639 if (!document__default['default'] || !document__default['default'].createElement) {
30640 return false;
30641 }
30642
30643 var video = document__default['default'].createElement('video'); // native HLS is definitely not supported if HTML5 video isn't
30644
30645 if (!videojs__default['default'].getTech('Html5').isSupported()) {
30646 return false;
30647 } // HLS manifests can go by many mime-types
30648
30649
30650 var canPlay = [// Apple sanctioned
30651 'application/vnd.apple.mpegurl', // Apple sanctioned for backwards compatibility
30652 'audio/mpegurl', // Very common
30653 'audio/x-mpegurl', // Very common
30654 'application/x-mpegurl', // Included for completeness
30655 'video/x-mpegurl', 'video/mpegurl', 'application/mpegurl'];
30656 return canPlay.some(function (canItPlay) {
30657 return /maybe|probably/i.test(video.canPlayType(canItPlay));
30658 });
30659 }();
30660
30661 Vhs.supportsNativeDash = function () {
30662 if (!document__default['default'] || !document__default['default'].createElement || !videojs__default['default'].getTech('Html5').isSupported()) {
30663 return false;
30664 }
30665
30666 return /maybe|probably/i.test(document__default['default'].createElement('video').canPlayType('application/dash+xml'));
30667 }();
30668
30669 Vhs.supportsTypeNatively = function (type) {
30670 if (type === 'hls') {
30671 return Vhs.supportsNativeHls;
30672 }
30673
30674 if (type === 'dash') {
30675 return Vhs.supportsNativeDash;
30676 }
30677
30678 return false;
30679 };
30680 /**
30681 * HLS is a source handler, not a tech. Make sure attempts to use it
30682 * as one do not cause exceptions.
30683 */
30684
30685
30686 Vhs.isSupported = function () {
30687 return videojs__default['default'].log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
30688 };
30689
30690 var Component = videojs__default['default'].getComponent('Component');
30691 /**
30692 * The Vhs Handler object, where we orchestrate all of the parts
30693 * of HLS to interact with video.js
30694 *
30695 * @class VhsHandler
30696 * @extends videojs.Component
30697 * @param {Object} source the source object
30698 * @param {Tech} tech the parent tech object
30699 * @param {Object} options optional and required options
30700 */
30701
30702 var VhsHandler = /*#__PURE__*/function (_Component) {
30703 inheritsLoose(VhsHandler, _Component);
30704
30705 function VhsHandler(source, tech, options) {
30706 var _this;
30707
30708 _this = _Component.call(this, tech, videojs__default['default'].mergeOptions(options.hls, options.vhs)) || this;
30709
30710 if (options.hls && Object.keys(options.hls).length) {
30711 videojs__default['default'].log.warn('Using hls options is deprecated. Use vhs instead.');
30712 }
30713
30714 _this.logger_ = logger('VhsHandler'); // tech.player() is deprecated but setup a reference to HLS for
30715 // backwards-compatibility
30716
30717 if (tech.options_ && tech.options_.playerId) {
30718 var _player = videojs__default['default'](tech.options_.playerId);
30719
30720 if (!_player.hasOwnProperty('hls')) {
30721 Object.defineProperty(_player, 'hls', {
30722 get: function get() {
30723 videojs__default['default'].log.warn('player.hls is deprecated. Use player.tech().vhs instead.');
30724 tech.trigger({
30725 type: 'usage',
30726 name: 'hls-player-access'
30727 });
30728 return assertThisInitialized(_this);
30729 },
30730 configurable: true
30731 });
30732 }
30733
30734 if (!_player.hasOwnProperty('vhs')) {
30735 Object.defineProperty(_player, 'vhs', {
30736 get: function get() {
30737 videojs__default['default'].log.warn('player.vhs is deprecated. Use player.tech().vhs instead.');
30738 tech.trigger({
30739 type: 'usage',
30740 name: 'vhs-player-access'
30741 });
30742 return assertThisInitialized(_this);
30743 },
30744 configurable: true
30745 });
30746 }
30747
30748 if (!_player.hasOwnProperty('dash')) {
30749 Object.defineProperty(_player, 'dash', {
30750 get: function get() {
30751 videojs__default['default'].log.warn('player.dash is deprecated. Use player.tech().vhs instead.');
30752 return assertThisInitialized(_this);
30753 },
30754 configurable: true
30755 });
30756 }
30757
30758 _this.player_ = _player;
30759 }
30760
30761 _this.tech_ = tech;
30762 _this.source_ = source;
30763 _this.stats = {};
30764 _this.ignoreNextSeekingEvent_ = false;
30765
30766 _this.setOptions_();
30767
30768 if (_this.options_.overrideNative && tech.overrideNativeAudioTracks && tech.overrideNativeVideoTracks) {
30769 tech.overrideNativeAudioTracks(true);
30770 tech.overrideNativeVideoTracks(true);
30771 } else if (_this.options_.overrideNative && (tech.featuresNativeVideoTracks || tech.featuresNativeAudioTracks)) {
30772 // overriding native HLS only works if audio tracks have been emulated
30773 // error early if we're misconfigured
30774 throw new Error('Overriding native HLS requires emulated tracks. ' + 'See https://git.io/vMpjB');
30775 } // listen for fullscreenchange events for this player so that we
30776 // can adjust our quality selection quickly
30777
30778
30779 _this.on(document__default['default'], ['fullscreenchange', 'webkitfullscreenchange', 'mozfullscreenchange', 'MSFullscreenChange'], function (event) {
30780 var fullscreenElement = document__default['default'].fullscreenElement || document__default['default'].webkitFullscreenElement || document__default['default'].mozFullScreenElement || document__default['default'].msFullscreenElement;
30781
30782 if (fullscreenElement && fullscreenElement.contains(_this.tech_.el())) {
30783 _this.masterPlaylistController_.smoothQualityChange_();
30784 }
30785 });
30786
30787 _this.on(_this.tech_, 'seeking', function () {
30788 if (this.ignoreNextSeekingEvent_) {
30789 this.ignoreNextSeekingEvent_ = false;
30790 return;
30791 }
30792
30793 this.setCurrentTime(this.tech_.currentTime());
30794 });
30795
30796 _this.on(_this.tech_, 'error', function () {
30797 // verify that the error was real and we are loaded
30798 // enough to have mpc loaded.
30799 if (this.tech_.error() && this.masterPlaylistController_) {
30800 this.masterPlaylistController_.pauseLoading();
30801 }
30802 });
30803
30804 _this.on(_this.tech_, 'play', _this.play);
30805
30806 return _this;
30807 }
30808
30809 var _proto = VhsHandler.prototype;
30810
30811 _proto.setOptions_ = function setOptions_() {
30812 var _this2 = this;
30813
30814 // defaults
30815 this.options_.withCredentials = this.options_.withCredentials || false;
30816 this.options_.handleManifestRedirects = this.options_.handleManifestRedirects === false ? false : true;
30817 this.options_.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions === false ? false : true;
30818 this.options_.useDevicePixelRatio = this.options_.useDevicePixelRatio || false;
30819 this.options_.smoothQualityChange = this.options_.smoothQualityChange || false;
30820 this.options_.useBandwidthFromLocalStorage = typeof this.source_.useBandwidthFromLocalStorage !== 'undefined' ? this.source_.useBandwidthFromLocalStorage : this.options_.useBandwidthFromLocalStorage || false;
30821 this.options_.customTagParsers = this.options_.customTagParsers || [];
30822 this.options_.customTagMappers = this.options_.customTagMappers || [];
30823 this.options_.cacheEncryptionKeys = this.options_.cacheEncryptionKeys || false;
30824 this.options_.handlePartialData = this.options_.handlePartialData || false;
30825
30826 if (typeof this.options_.blacklistDuration !== 'number') {
30827 this.options_.blacklistDuration = 5 * 60;
30828 }
30829
30830 if (typeof this.options_.bandwidth !== 'number') {
30831 if (this.options_.useBandwidthFromLocalStorage) {
30832 var storedObject = getVhsLocalStorage();
30833
30834 if (storedObject && storedObject.bandwidth) {
30835 this.options_.bandwidth = storedObject.bandwidth;
30836 this.tech_.trigger({
30837 type: 'usage',
30838 name: 'vhs-bandwidth-from-local-storage'
30839 });
30840 this.tech_.trigger({
30841 type: 'usage',
30842 name: 'hls-bandwidth-from-local-storage'
30843 });
30844 }
30845
30846 if (storedObject && storedObject.throughput) {
30847 this.options_.throughput = storedObject.throughput;
30848 this.tech_.trigger({
30849 type: 'usage',
30850 name: 'vhs-throughput-from-local-storage'
30851 });
30852 this.tech_.trigger({
30853 type: 'usage',
30854 name: 'hls-throughput-from-local-storage'
30855 });
30856 }
30857 }
30858 } // if bandwidth was not set by options or pulled from local storage, start playlist
30859 // selection at a reasonable bandwidth
30860
30861
30862 if (typeof this.options_.bandwidth !== 'number') {
30863 this.options_.bandwidth = Config.INITIAL_BANDWIDTH;
30864 } // If the bandwidth number is unchanged from the initial setting
30865 // then this takes precedence over the enableLowInitialPlaylist option
30866
30867
30868 this.options_.enableLowInitialPlaylist = this.options_.enableLowInitialPlaylist && this.options_.bandwidth === Config.INITIAL_BANDWIDTH; // grab options passed to player.src
30869
30870 ['withCredentials', 'useDevicePixelRatio', 'limitRenditionByPlayerDimensions', 'bandwidth', 'smoothQualityChange', 'customTagParsers', 'customTagMappers', 'handleManifestRedirects', 'cacheEncryptionKeys', 'handlePartialData', 'playlistSelector', 'initialPlaylistSelector', 'experimentalBufferBasedABR', 'liveRangeSafeTimeDelta', 'experimentalLLHLS'].forEach(function (option) {
30871 if (typeof _this2.source_[option] !== 'undefined') {
30872 _this2.options_[option] = _this2.source_[option];
30873 }
30874 });
30875 this.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions;
30876 this.useDevicePixelRatio = this.options_.useDevicePixelRatio;
30877 }
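  /*
   * Because setOptions_ copies the whitelisted keys from the source object,
   * per-source overrides can ride along with player.src (sketch; values are
   * illustrative):
   *
   *   player.src({
   *     src: 'https://example.com/master.m3u8',
   *     type: 'application/x-mpegURL',
   *     withCredentials: true,
   *     bandwidth: 5000000
   *   });
   */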
30878 /**
30879 * called when player.src gets called, handle a new source
30880 *
30881 * @param {Object} src the source object to handle
30882 */
30883 ;
30884
30885 _proto.src = function src(_src, type) {
30886 var _this3 = this;
30887
30888 // do nothing if the src is falsey
30889 if (!_src) {
30890 return;
30891 }
30892
30893 this.setOptions_(); // add master playlist controller options
30894
30895 this.options_.src = expandDataUri(this.source_.src);
30896 this.options_.tech = this.tech_;
30897 this.options_.externVhs = Vhs;
30898 this.options_.sourceType = simpleTypeFromSourceType(type); // Whenever we seek internally, we should update the tech
30899
30900 this.options_.seekTo = function (time) {
30901 _this3.tech_.setCurrentTime(time);
30902 };
30903
30904 this.masterPlaylistController_ = new MasterPlaylistController(this.options_);
30905 var playbackWatcherOptions = videojs__default['default'].mergeOptions({
30906 liveRangeSafeTimeDelta: SAFE_TIME_DELTA
30907 }, this.options_, {
30908 seekable: function seekable() {
30909 return _this3.seekable();
30910 },
30911 media: function media() {
30912 return _this3.masterPlaylistController_.media();
30913 },
30914 masterPlaylistController: this.masterPlaylistController_
30915 });
30916 this.playbackWatcher_ = new PlaybackWatcher(playbackWatcherOptions);
30917 this.masterPlaylistController_.on('error', function () {
30918 var player = videojs__default['default'].players[_this3.tech_.options_.playerId];
30919 var error = _this3.masterPlaylistController_.error;
30920
30921 if (typeof error === 'object' && !error.code) {
30922 error.code = 3;
30923 } else if (typeof error === 'string') {
30924 error = {
30925 message: error,
30926 code: 3
30927 };
30928 }
30929
30930 player.error(error);
30931 });
30932 var defaultSelector = this.options_.experimentalBufferBasedABR ? Vhs.movingAverageBandwidthSelector(0.55) : Vhs.STANDARD_PLAYLIST_SELECTOR; // `this` in selectPlaylist should be the VhsHandler for backwards
30933 // compatibility with < v2
30934
30935 this.masterPlaylistController_.selectPlaylist = this.selectPlaylist ? this.selectPlaylist.bind(this) : defaultSelector.bind(this);
30936 this.masterPlaylistController_.selectInitialPlaylist = Vhs.INITIAL_PLAYLIST_SELECTOR.bind(this); // re-expose some internal objects for backwards compatibility with < v2
30937
30938 this.playlists = this.masterPlaylistController_.masterPlaylistLoader_;
30939 this.mediaSource = this.masterPlaylistController_.mediaSource; // Proxy assignment of some properties to the master playlist
30940 // controller. Using a custom property for backwards compatibility
30941 // with < v2
30942
30943 Object.defineProperties(this, {
30944 selectPlaylist: {
30945 get: function get() {
30946 return this.masterPlaylistController_.selectPlaylist;
30947 },
30948 set: function set(selectPlaylist) {
30949 this.masterPlaylistController_.selectPlaylist = selectPlaylist.bind(this);
30950 }
30951 },
30952 throughput: {
30953 get: function get() {
30954 return this.masterPlaylistController_.mainSegmentLoader_.throughput.rate;
30955 },
30956 set: function set(throughput) {
30957 this.masterPlaylistController_.mainSegmentLoader_.throughput.rate = throughput; // By setting `count` to 1 the throughput value becomes the starting value
30958 // for the cumulative average
30959
30960 this.masterPlaylistController_.mainSegmentLoader_.throughput.count = 1;
30961 }
30962 },
30963 bandwidth: {
30964 get: function get() {
30965 return this.masterPlaylistController_.mainSegmentLoader_.bandwidth;
30966 },
30967 set: function set(bandwidth) {
30968 this.masterPlaylistController_.mainSegmentLoader_.bandwidth = bandwidth; // setting the bandwidth manually resets the throughput counter
30969 // `count` is set to zero so that the current value of `rate` isn't included
30970 // in the cumulative average
30971
30972 this.masterPlaylistController_.mainSegmentLoader_.throughput = {
30973 rate: 0,
30974 count: 0
30975 };
30976 }
30977 },
30978
30979 /**
30980 * `systemBandwidth` is a combination of two serial processes' bit-rates. The first
30981 * is the network bitrate provided by `bandwidth` and the second is the bitrate of
30982 * the entire process after that - decryption, transmuxing, and appending - provided
30983 * by `throughput`.
30984 *
30985 * Since the two processes are serial, the overall system bandwidth is given by:
30986 * sysBandwidth = 1 / (1 / bandwidth + 1 / throughput)
30987 */
30988 systemBandwidth: {
30989 get: function get() {
30990 var invBandwidth = 1 / (this.bandwidth || 1);
30991 var invThroughput;
30992
30993 if (this.throughput > 0) {
30994 invThroughput = 1 / this.throughput;
30995 } else {
30996 invThroughput = 0;
30997 }
30998
30999 var systemBitrate = Math.floor(1 / (invBandwidth + invThroughput));
31000 return systemBitrate;
31001 },
31002 set: function set() {
31003 videojs__default['default'].log.error('The "systemBandwidth" property is read-only');
31004 }
31005 }
31006 });
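  /*
   * Worked example of the systemBandwidth formula above (illustrative
   * numbers): with bandwidth = 5e6 bits/s and throughput = 2e7 bits/s,
   *
   *   systemBandwidth = 1 / (1/5e6 + 1/2e7) = 1 / 2.5e-7 = 4e6 bits/s
   *
   * The slower serial stage dominates, and the combined rate is always below
   * both inputs.
   */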
31007
31008 if (this.options_.bandwidth) {
31009 this.bandwidth = this.options_.bandwidth;
31010 }
31011
31012 if (this.options_.throughput) {
31013 this.throughput = this.options_.throughput;
31014 }
31015
31016 Object.defineProperties(this.stats, {
31017 bandwidth: {
31018 get: function get() {
31019 return _this3.bandwidth || 0;
31020 },
31021 enumerable: true
31022 },
31023 mediaRequests: {
31024 get: function get() {
31025 return _this3.masterPlaylistController_.mediaRequests_() || 0;
31026 },
31027 enumerable: true
31028 },
31029 mediaRequestsAborted: {
31030 get: function get() {
31031 return _this3.masterPlaylistController_.mediaRequestsAborted_() || 0;
31032 },
31033 enumerable: true
31034 },
31035 mediaRequestsTimedout: {
31036 get: function get() {
31037 return _this3.masterPlaylistController_.mediaRequestsTimedout_() || 0;
31038 },
31039 enumerable: true
31040 },
31041 mediaRequestsErrored: {
31042 get: function get() {
31043 return _this3.masterPlaylistController_.mediaRequestsErrored_() || 0;
31044 },
31045 enumerable: true
31046 },
31047 mediaTransferDuration: {
31048 get: function get() {
31049 return _this3.masterPlaylistController_.mediaTransferDuration_() || 0;
31050 },
31051 enumerable: true
31052 },
31053 mediaBytesTransferred: {
31054 get: function get() {
31055 return _this3.masterPlaylistController_.mediaBytesTransferred_() || 0;
31056 },
31057 enumerable: true
31058 },
31059 mediaSecondsLoaded: {
31060 get: function get() {
31061 return _this3.masterPlaylistController_.mediaSecondsLoaded_() || 0;
31062 },
31063 enumerable: true
31064 },
31065 buffered: {
31066 get: function get() {
31067 return timeRangesToArray(_this3.tech_.buffered());
31068 },
31069 enumerable: true
31070 },
31071 currentTime: {
31072 get: function get() {
31073 return _this3.tech_.currentTime();
31074 },
31075 enumerable: true
31076 },
31077 currentSource: {
31078 get: function get() {
31079 return _this3.tech_.currentSource_;
31080 },
31081 enumerable: true
31082 },
31083 currentTech: {
31084 get: function get() {
31085 return _this3.tech_.name_;
31086 },
31087 enumerable: true
31088 },
31089 duration: {
31090 get: function get() {
31091 return _this3.tech_.duration();
31092 },
31093 enumerable: true
31094 },
31095 master: {
31096 get: function get() {
31097 return _this3.playlists.master;
31098 },
31099 enumerable: true
31100 },
31101 playerDimensions: {
31102 get: function get() {
31103 return _this3.tech_.currentDimensions();
31104 },
31105 enumerable: true
31106 },
31107 seekable: {
31108 get: function get() {
31109 return timeRangesToArray(_this3.tech_.seekable());
31110 },
31111 enumerable: true
31112 },
31113 timestamp: {
31114 get: function get() {
31115 return Date.now();
31116 },
31117 enumerable: true
31118 },
31119 videoPlaybackQuality: {
31120 get: function get() {
31121 return _this3.tech_.getVideoPlaybackQuality();
31122 },
31123 enumerable: true
31124 }
31125 });
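  /*
   * The stats object defined above is read-only telemetry. It can be polled
   * through the tech (sketch):
   *
   *   var vhs = player.tech({ IWillNotUseThisInPlugins: true }).vhs;
   *   console.log(vhs.stats.bandwidth, vhs.stats.mediaBytesTransferred);
   */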
31126 this.tech_.one('canplay', this.masterPlaylistController_.setupFirstPlay.bind(this.masterPlaylistController_));
31127 this.tech_.on('bandwidthupdate', function () {
31128 if (_this3.options_.useBandwidthFromLocalStorage) {
31129 updateVhsLocalStorage({
31130 bandwidth: _this3.bandwidth,
31131 throughput: Math.round(_this3.throughput)
31132 });
31133 }
31134 });
31135 this.masterPlaylistController_.on('selectedinitialmedia', function () {
31136 // Add the manual rendition mix-in to VhsHandler
31137 renditionSelectionMixin(_this3);
31138 });
31139 this.masterPlaylistController_.sourceUpdater_.on('createdsourcebuffers', function () {
31140 _this3.setupEme_();
31141 }); // the bandwidth of the primary segment loader is our best
31142 // estimate of overall bandwidth
31143
31144 this.on(this.masterPlaylistController_, 'progress', function () {
31145 this.tech_.trigger('progress');
31146 }); // In the live case, we need to ignore the very first `seeking` event since
31147 // that will be the result of the seek-to-live behavior
31148
31149 this.on(this.masterPlaylistController_, 'firstplay', function () {
31150 this.ignoreNextSeekingEvent_ = true;
31151 });
31152 this.setupQualityLevels_(); // do nothing if the tech has been disposed already
31153 // this can occur if someone sets the src in player.ready(), for instance
31154
31155 if (!this.tech_.el()) {
31156 return;
31157 }
31158
31159 this.mediaSourceUrl_ = window__default['default'].URL.createObjectURL(this.masterPlaylistController_.mediaSource);
31160 this.tech_.src(this.mediaSourceUrl_);
31161 }
31162 /**
31163 * If necessary and EME is available, sets up EME options and waits for key session
31164 * creation.
31165 *
31166 * This function also updates the source updater so that it can be used, as for some
31167 * browsers, EME must be configured before content is appended (if appending unencrypted
31168 * content before encrypted content).
31169 */
31170 ;
31171
31172 _proto.setupEme_ = function setupEme_() {
31173 var _this4 = this;
31174
31175 var audioPlaylistLoader = this.masterPlaylistController_.mediaTypes_.AUDIO.activePlaylistLoader;
31176 var didSetupEmeOptions = setupEmeOptions({
31177 player: this.player_,
31178 sourceKeySystems: this.source_.keySystems,
31179 media: this.playlists.media(),
31180 audioMedia: audioPlaylistLoader && audioPlaylistLoader.media()
31181 }); // In IE11 this is too early to initialize media keys, and IE11 does not support
31182 // promises.
31183
31184 if (videojs__default['default'].browser.IE_VERSION === 11 || !didSetupEmeOptions) {
31185 // If EME options were not set up, we've done all we could to initialize EME.
31186 this.masterPlaylistController_.sourceUpdater_.initializedEme();
31187 return;
31188 }
31189
31190 this.logger_('waiting for EME key session creation');
31191 waitForKeySessionCreation({
31192 player: this.player_,
31193 sourceKeySystems: this.source_.keySystems,
31194 audioMedia: audioPlaylistLoader && audioPlaylistLoader.media(),
31195 mainPlaylists: this.playlists.master.playlists
31196 }).then(function () {
31197 _this4.logger_('created EME key session');
31198
31199 _this4.masterPlaylistController_.sourceUpdater_.initializedEme();
31200 }).catch(function (err) {
31201 _this4.logger_('error while creating EME key session', err);
31202
31203 _this4.player_.error({
31204 message: 'Failed to initialize media keys for EME',
31205 code: 3
31206 });
31207 });
31208 }
31209 /**
31210 * Initializes the quality levels and sets listeners to update them.
31211 *
31212 * @method setupQualityLevels_
31213 * @private
31214 */
31215 ;
31216
31217 _proto.setupQualityLevels_ = function setupQualityLevels_() {
31218 var _this5 = this;
31219
31220 var player = videojs__default['default'].players[this.tech_.options_.playerId]; // if there isn't a player or there isn't a qualityLevels plugin
31221 // or qualityLevels_ listeners have already been setup, do nothing.
31222
31223 if (!player || !player.qualityLevels || this.qualityLevels_) {
31224 return;
31225 }
31226
31227 this.qualityLevels_ = player.qualityLevels();
31228 this.masterPlaylistController_.on('selectedinitialmedia', function () {
31229 handleVhsLoadedMetadata(_this5.qualityLevels_, _this5);
31230 });
31231 this.playlists.on('mediachange', function () {
31232 handleVhsMediaChange(_this5.qualityLevels_, _this5.playlists);
31233 });
31234 }
31235 /**
31236 * return the version
31237 */
31238 ;
31239
31240 VhsHandler.version = function version$5() {
31241 return {
31242 '@videojs/http-streaming': version$4,
31243 'mux.js': version$3,
31244 'mpd-parser': version$2,
31245 'm3u8-parser': version$1,
31246 'aes-decrypter': version
31247 };
31248 }
31249 /**
31250 * return the version
31251 */
31252 ;
31253
31254 _proto.version = function version() {
31255 return this.constructor.version();
31256 };
31257
31258 _proto.canChangeType = function canChangeType() {
31259 return SourceUpdater.canChangeType();
31260 }
31261 /**
31262 * Begin playing the video.
31263 */
31264 ;
31265
31266 _proto.play = function play() {
31267 this.masterPlaylistController_.play();
31268 }
31269 /**
31270 * a wrapper around the function in MasterPlaylistController
31271 */
31272 ;
31273
31274 _proto.setCurrentTime = function setCurrentTime(currentTime) {
31275 this.masterPlaylistController_.setCurrentTime(currentTime);
31276 }
31277 /**
31278 * a wrapper around the function in MasterPlaylistController
31279 */
31280 ;
31281
31282 _proto.duration = function duration() {
31283 return this.masterPlaylistController_.duration();
31284 }
31285 /**
31286 * a wrapper around the function in MasterPlaylistController
31287 */
31288 ;
31289
31290 _proto.seekable = function seekable() {
31291 return this.masterPlaylistController_.seekable();
31292 }
31293 /**
31294 * Abort all outstanding work and cleanup.
31295 */
31296 ;
31297
31298 _proto.dispose = function dispose() {
31299 if (this.playbackWatcher_) {
31300 this.playbackWatcher_.dispose();
31301 }
31302
31303 if (this.masterPlaylistController_) {
31304 this.masterPlaylistController_.dispose();
31305 }
31306
31307 if (this.qualityLevels_) {
31308 this.qualityLevels_.dispose();
31309 }
31310
31311 if (this.player_) {
31312 delete this.player_.vhs;
31313 delete this.player_.dash;
31314 delete this.player_.hls;
31315 }
31316
31317 if (this.tech_ && this.tech_.vhs) {
31318 delete this.tech_.vhs;
31319 } // don't check this.tech_.hls as it will log a deprecated warning
31320
31321
31322 if (this.tech_) {
31323 delete this.tech_.hls;
31324 }
31325
31326 if (this.mediaSourceUrl_ && window__default['default'].URL.revokeObjectURL) {
31327 window__default['default'].URL.revokeObjectURL(this.mediaSourceUrl_);
31328 this.mediaSourceUrl_ = null;
31329 }
31330
31331 _Component.prototype.dispose.call(this);
31332 };
31333
31334 _proto.convertToProgramTime = function convertToProgramTime(time, callback) {
31335 return getProgramTime({
31336 playlist: this.masterPlaylistController_.media(),
31337 time: time,
31338 callback: callback
31339 });
31340 } // the player must be playing before calling this
31341 ;
31342
31343 _proto.seekToProgramTime = function seekToProgramTime$1(programTime, callback, pauseAfterSeek, retryCount) {
31344 if (pauseAfterSeek === void 0) {
31345 pauseAfterSeek = true;
31346 }
31347
31348 if (retryCount === void 0) {
31349 retryCount = 2;
31350 }
31351
31352 return seekToProgramTime({
31353 programTime: programTime,
31354 playlist: this.masterPlaylistController_.media(),
31355 retryCount: retryCount,
31356 pauseAfterSeek: pauseAfterSeek,
31357 seekTo: this.options_.seekTo,
31358 tech: this.options_.tech,
31359 callback: callback
31360 });
31361 };
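  /*
   * Usage sketch for the program-time helpers above (the ISO string is a
   * placeholder, and the (err, newTime) callback shape is assumed):
   *
   *   player.tech({ IWillNotUseThisInPlugins: true }).vhs.seekToProgramTime(
   *     '2021-01-01T00:00:30.000Z',
   *     function (err, newTime) {
   *       if (!err) { console.log('seeked to', newTime); }
   *     }
   *   );
   */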
31362
31363 return VhsHandler;
31364 }(Component);
31365 /**
31366 * The Source Handler object, which informs video.js what additional
31367 * MIME types are supported and sets up playback. It is registered
31368 * automatically to the appropriate tech based on the capabilities of
31369 * the browser it is running in. It is not necessary to use or modify
31370 * this object in normal usage.
31371 */
31372
31373
31374 var VhsSourceHandler = {
31375 name: 'videojs-http-streaming',
31376 VERSION: version$4,
31377 canHandleSource: function canHandleSource(srcObj, options) {
31378 if (options === void 0) {
31379 options = {};
31380 }
31381
31382 var localOptions = videojs__default['default'].mergeOptions(videojs__default['default'].options, options);
31383 return VhsSourceHandler.canPlayType(srcObj.type, localOptions);
31384 },
31385 handleSource: function handleSource(source, tech, options) {
31386 if (options === void 0) {
31387 options = {};
31388 }
31389
31390 var localOptions = videojs__default['default'].mergeOptions(videojs__default['default'].options, options);
31391 tech.vhs = new VhsHandler(source, tech, localOptions);
31392
31393 if (!videojs__default['default'].hasOwnProperty('hls')) {
31394 Object.defineProperty(tech, 'hls', {
31395 get: function get() {
31396 videojs__default['default'].log.warn('player.tech().hls is deprecated. Use player.tech().vhs instead.');
31397 return tech.vhs;
31398 },
31399 configurable: true
31400 });
31401 }
31402
31403 tech.vhs.xhr = xhrFactory();
31404 tech.vhs.src(source.src, source.type);
31405 return tech.vhs;
31406 },
31407 canPlayType: function canPlayType(type, options) {
31408 if (options === void 0) {
31409 options = {};
31410 }
31411
31412 var _videojs$mergeOptions = videojs__default['default'].mergeOptions(videojs__default['default'].options, options),
31413 _videojs$mergeOptions2 = _videojs$mergeOptions.vhs.overrideNative,
31414 overrideNative = _videojs$mergeOptions2 === void 0 ? !videojs__default['default'].browser.IS_ANY_SAFARI : _videojs$mergeOptions2;
31415
31416 var supportedType = simpleTypeFromSourceType(type);
31417 var canUseMsePlayback = supportedType && (!Vhs.supportsTypeNatively(supportedType) || overrideNative);
31418 return canUseMsePlayback ? 'maybe' : '';
31419 }
31420 };
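  /*
   * canPlayType above decides whether this source handler claims a stream. On
   * Safari, where native HLS exists, VHS steps aside unless overrideNative is
   * set; per the VhsHandler constructor check, native tracks must then be
   * emulated too (sketch; 'my-video' is a placeholder element id):
   *
   *   var player = videojs('my-video', {
   *     html5: {
   *       vhs: { overrideNative: true },
   *       nativeAudioTracks: false,
   *       nativeVideoTracks: false
   *     }
   *   });
   */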
31421 /**
31422 * Check to see if the native MediaSource object exists and supports
31423 * an MP4 container with both H.264 video and AAC-LC audio.
31424 *
31425 * @return {boolean} if native media sources are supported
31426 */
31427
31428 var supportsNativeMediaSources = function supportsNativeMediaSources() {
31429 return browserSupportsCodec('avc1.4d400d,mp4a.40.2');
31430 }; // register source handlers with the appropriate techs
31431
31432
31433 if (supportsNativeMediaSources()) {
31434 videojs__default['default'].getTech('Html5').registerSourceHandler(VhsSourceHandler, 0);
31435 }
31436
31437 videojs__default['default'].VhsHandler = VhsHandler;
31438 Object.defineProperty(videojs__default['default'], 'HlsHandler', {
31439 get: function get() {
31440 videojs__default['default'].log.warn('videojs.HlsHandler is deprecated. Use videojs.VhsHandler instead.');
31441 return VhsHandler;
31442 },
31443 configurable: true
31444 });
31445 videojs__default['default'].VhsSourceHandler = VhsSourceHandler;
31446 Object.defineProperty(videojs__default['default'], 'HlsSourceHandler', {
31447 get: function get() {
31448 videojs__default['default'].log.warn('videojs.HlsSourceHandler is deprecated. ' + 'Use videojs.VhsSourceHandler instead.');
31449 return VhsSourceHandler;
31450 },
31451 configurable: true
31452 });
31453 videojs__default['default'].Vhs = Vhs;
31454 Object.defineProperty(videojs__default['default'], 'Hls', {
31455 get: function get() {
31456 videojs__default['default'].log.warn('videojs.Hls is deprecated. Use videojs.Vhs instead.');
31457 return Vhs;
31458 },
31459 configurable: true
31460 });
31461
31462 if (!videojs__default['default'].use) {
31463 videojs__default['default'].registerComponent('Hls', Vhs);
31464 videojs__default['default'].registerComponent('Vhs', Vhs);
31465 }
31466
31467 videojs__default['default'].options.vhs = videojs__default['default'].options.vhs || {};
31468 videojs__default['default'].options.hls = videojs__default['default'].options.hls || {};
31469
31470 if (videojs__default['default'].registerPlugin) {
31471 videojs__default['default'].registerPlugin('reloadSourceOnError', reloadSourceOnError);
31472 } else {
31473 videojs__default['default'].plugin('reloadSourceOnError', reloadSourceOnError);
31474 }
31475
31476 exports.LOCAL_STORAGE_KEY = LOCAL_STORAGE_KEY;
31477 exports.Vhs = Vhs;
31478 exports.VhsHandler = VhsHandler;
31479 exports.VhsSourceHandler = VhsSourceHandler;
31480 exports.emeKeySystems = emeKeySystems;
31481 exports.expandDataUri = expandDataUri;
31482 exports.getAllPsshKeySystemsOptions = getAllPsshKeySystemsOptions;
31483 exports.setupEmeOptions = setupEmeOptions;
31484 exports.simpleTypeFromSourceType = simpleTypeFromSourceType;
31485 exports.waitForKeySessionCreation = waitForKeySessionCreation;
31486
31487 Object.defineProperty(exports, '__esModule', { value: true });
31488
31489})));