UNPKG

1.1 MBJavaScriptView Raw
1/*! @name @videojs/http-streaming @version 2.14.2 @license Apache-2.0 */
2(function (global, factory) {
3 typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('video.js'), require('@xmldom/xmldom')) :
4 typeof define === 'function' && define.amd ? define(['exports', 'video.js', '@xmldom/xmldom'], factory) :
5 (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.httpStreaming = {}, global.videojs, global.window));
6})(this, (function (exports, videojs, xmldom) { 'use strict';
7
// Normalizes a CommonJS/ES-module interop value so consumers can always read
// it through `.default`: values already carrying a `default` export pass
// through untouched, everything else gets wrapped.
function _interopDefaultLegacy(e) {
  var alreadyHasDefault = e && typeof e === 'object' && 'default' in e;
  if (alreadyHasDefault) {
    return e;
  }
  return { 'default': e };
}
9
10 var videojs__default = /*#__PURE__*/_interopDefaultLegacy(videojs);
11
// Executes a bundled CommonJS module factory and returns its exports.
// A minimal `module` shim (path, exports, require) is handed to the factory,
// mirroring what a real CommonJS loader would provide.
function createCommonjsModule(fn, basedir, module) {
  module = {
    path: basedir,
    exports: {},
    require: function (path, base) {
      // fall back to this module's own path when no base is given
      var resolvedBase = (base === undefined || base === null) ? module.path : base;
      return commonjsRequire(path, resolvedBase);
    }
  };
  fn(module, module.exports);
  return module.exports;
}
21
// Placeholder for dynamic `require` calls, which the commonjs rollup plugin
// cannot rewrite statically; invoking it is always an error.
function commonjsRequire() {
  throw new Error('Dynamic requires are not currently supported by @rollup/plugin-commonjs');
}
25
// Babel helper (bundled as a CommonJS module): guards against reading `this`
// before `super()` has run in a transpiled subclass constructor.
var assertThisInitialized = createCommonjsModule(function (module) {
  function _assertThisInitialized(self) {
    if (self === void 0) {
      throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
    }

    return self;
  }

  module.exports = _assertThisInitialized;
  // Mimic an ES module default export for interop consumers.
  module.exports["default"] = module.exports;
  module.exports.__esModule = true;
});
38
// Babel helper (bundled as a CommonJS module): sets the [[Prototype]] of `o`
// to `p`, preferring the native Object.setPrototypeOf and falling back to a
// direct `__proto__` assignment.
var setPrototypeOf = createCommonjsModule(function (module) {
  function _setPrototypeOf(o, p) {
    // Memoize on first call: rebind both module.exports and the local name to
    // the best available implementation, then delegate the actual work to it.
    module.exports = _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) {
      o.__proto__ = p;
      return o;
    };

    // Re-attach the fake ES-module markers after the reassignment above.
    module.exports["default"] = module.exports, module.exports.__esModule = true;
    return _setPrototypeOf(o, p);
  }

  module.exports = _setPrototypeOf;
  module.exports["default"] = module.exports, module.exports.__esModule = true;
});
53
// Babel helper (bundled as a CommonJS module): wires up "loose" prototypal
// inheritance — the subclass prototype delegates to the superclass prototype,
// and the constructor functions themselves are linked for static inheritance.
var inheritsLoose = createCommonjsModule(function (module) {
  function _inheritsLoose(subClass, superClass) {
    subClass.prototype = Object.create(superClass.prototype);
    subClass.prototype.constructor = subClass;
    setPrototypeOf(subClass, superClass);
  }

  module.exports = _inheritsLoose;
  // Mimic an ES module default export for interop consumers.
  module.exports["default"] = module.exports;
  module.exports.__esModule = true;
});
64
// url-toolkit (bundled as a CommonJS module): RFC 1808-style resolution of
// relative URLs against a base URL; used when no native URL constructor is
// available.
var urlToolkit = createCommonjsModule(function (module, exports) {
  // see https://tools.ietf.org/html/rfc1808
  (function (root) {
    // Capture groups: scheme, net_loc (//host), path, params (;...),
    // query (?...), fragment (#...).
    var URL_REGEX = /^((?:[a-zA-Z0-9+\-.]+:)?)(\/\/[^\/?#]*)?((?:[^\/?#]*\/)*[^;?#]*)?(;[^?#]*)?(\?[^#]*)?(#[^]*)?$/;
    // Everything up to the first '/', '?' or '#', plus the remainder.
    var FIRST_SEGMENT_REGEX = /^([^\/?#]*)([^]*)$/;
    // NOTE: the two regexes below are applied to a *reversed* copy of the
    // path inside normalizePath().
    var SLASH_DOT_REGEX = /(?:\/|^)\.(?=\/)/g;
    var SLASH_DOT_DOT_REGEX = /(?:\/|^)\.\.\/(?!\.\.\/)[^\/]*(?=\/)/g;
    var URLToolkit = {
      // If opts.alwaysNormalize is true then the path will always be normalized even when it starts with / or //
      // E.g
      // With opts.alwaysNormalize = false (default, spec compliant)
      // http://a.com/b/cd + /e/f/../g => http://a.com/e/f/../g
      // With opts.alwaysNormalize = true (not spec compliant)
      // http://a.com/b/cd + /e/f/../g => http://a.com/e/g
      buildAbsoluteURL: function buildAbsoluteURL(baseURL, relativeURL, opts) {
        opts = opts || {}; // remove any remaining space and CRLF

        baseURL = baseURL.trim();
        relativeURL = relativeURL.trim();

        if (!relativeURL) {
          // 2a) If the embedded URL is entirely empty, it inherits the
          // entire base URL (i.e., is set equal to the base URL)
          // and we are done.
          if (!opts.alwaysNormalize) {
            return baseURL;
          }

          var basePartsForNormalise = URLToolkit.parseURL(baseURL);

          if (!basePartsForNormalise) {
            throw new Error('Error trying to parse base URL.');
          }

          basePartsForNormalise.path = URLToolkit.normalizePath(basePartsForNormalise.path);
          return URLToolkit.buildURLFromParts(basePartsForNormalise);
        }

        var relativeParts = URLToolkit.parseURL(relativeURL);

        if (!relativeParts) {
          throw new Error('Error trying to parse relative URL.');
        }

        if (relativeParts.scheme) {
          // 2b) If the embedded URL starts with a scheme name, it is
          // interpreted as an absolute URL and we are done.
          if (!opts.alwaysNormalize) {
            return relativeURL;
          }

          relativeParts.path = URLToolkit.normalizePath(relativeParts.path);
          return URLToolkit.buildURLFromParts(relativeParts);
        }

        var baseParts = URLToolkit.parseURL(baseURL);

        if (!baseParts) {
          throw new Error('Error trying to parse base URL.');
        }

        if (!baseParts.netLoc && baseParts.path && baseParts.path[0] !== '/') {
          // If netLoc missing and path doesn't start with '/', assume everthing before the first '/' is the netLoc
          // This causes 'example.com/a' to be handled as '//example.com/a' instead of '/example.com/a'
          var pathParts = FIRST_SEGMENT_REGEX.exec(baseParts.path);
          baseParts.netLoc = pathParts[1];
          baseParts.path = pathParts[2];
        }

        if (baseParts.netLoc && !baseParts.path) {
          // a host with no path resolves relative to its root
          baseParts.path = '/';
        }

        var builtParts = {
          // 2c) Otherwise, the embedded URL inherits the scheme of
          // the base URL.
          scheme: baseParts.scheme,
          netLoc: relativeParts.netLoc,
          path: null,
          params: relativeParts.params,
          query: relativeParts.query,
          fragment: relativeParts.fragment
        };

        if (!relativeParts.netLoc) {
          // 3) If the embedded URL's <net_loc> is non-empty, we skip to
          // Step 7. Otherwise, the embedded URL inherits the <net_loc>
          // (if any) of the base URL.
          builtParts.netLoc = baseParts.netLoc; // 4) If the embedded URL path is preceded by a slash "/", the
          // path is not relative and we skip to Step 7.

          if (relativeParts.path[0] !== '/') {
            if (!relativeParts.path) {
              // 5) If the embedded URL path is empty (and not preceded by a
              // slash), then the embedded URL inherits the base URL path
              builtParts.path = baseParts.path; // 5a) if the embedded URL's <params> is non-empty, we skip to
              // step 7; otherwise, it inherits the <params> of the base
              // URL (if any) and

              if (!relativeParts.params) {
                builtParts.params = baseParts.params; // 5b) if the embedded URL's <query> is non-empty, we skip to
                // step 7; otherwise, it inherits the <query> of the base
                // URL (if any) and we skip to step 7.

                if (!relativeParts.query) {
                  builtParts.query = baseParts.query;
                }
              }
            } else {
              // 6) The last segment of the base URL's path (anything
              // following the rightmost slash "/", or the entire path if no
              // slash is present) is removed and the embedded URL's path is
              // appended in its place.
              var baseURLPath = baseParts.path;
              var newPath = baseURLPath.substring(0, baseURLPath.lastIndexOf('/') + 1) + relativeParts.path;
              builtParts.path = URLToolkit.normalizePath(newPath);
            }
          }
        }

        if (builtParts.path === null) {
          // absolute or net_loc-qualified relative path: use it as-is
          // (normalizing only when explicitly requested)
          builtParts.path = opts.alwaysNormalize ? URLToolkit.normalizePath(relativeParts.path) : relativeParts.path;
        }

        return URLToolkit.buildURLFromParts(builtParts);
      },
      // Splits a URL string into its six components; returns null when the
      // input cannot be matched at all.
      parseURL: function parseURL(url) {
        var parts = URL_REGEX.exec(url);

        if (!parts) {
          return null;
        }

        return {
          scheme: parts[1] || '',
          netLoc: parts[2] || '',
          path: parts[3] || '',
          params: parts[4] || '',
          query: parts[5] || '',
          fragment: parts[6] || ''
        };
      },
      normalizePath: function normalizePath(path) {
        // The following operations are
        // then applied, in order, to the new path:
        // 6a) All occurrences of "./", where "." is a complete path
        // segment, are removed.
        // 6b) If the path ends with "." as a complete path segment,
        // that "." is removed.
        path = path.split('').reverse().join('').replace(SLASH_DOT_REGEX, ''); // 6c) All occurrences of "<segment>/../", where <segment> is a
        // complete path segment not equal to "..", are removed.
        // Removal of these path segments is performed iteratively,
        // removing the leftmost matching pattern on each iteration,
        // until no matching pattern remains.
        // 6d) If the path ends with "<segment>/..", where <segment> is a
        // complete path segment not equal to "..", that
        // "<segment>/.." is removed.

        // repeat the replacement until the path reaches a fixed point
        while (path.length !== (path = path.replace(SLASH_DOT_DOT_REGEX, '')).length) {}

        // undo the reversal applied at the top of this function
        return path.split('').reverse().join('');
      },
      // Reassembles the six components produced by parseURL into a string.
      buildURLFromParts: function buildURLFromParts(parts) {
        return parts.scheme + parts.netLoc + parts.path + parts.params + parts.query + parts.fragment;
      }
    };
    module.exports = URLToolkit;
  })();
});
234
// Placeholder origin used when no window.location exists (e.g. under Node);
// stripped back off the result below when that placeholder was used.
var DEFAULT_LOCATION = 'http://example.com';

/**
 * Resolves `relativeUrl` against `baseUrl`, handling already-absolute URLs,
 * data-URI bases, protocol-relative bases, and window-less environments.
 *
 * @param {string} baseUrl the URL to resolve against
 * @param {string} relativeUrl the URL to resolve
 * @return {string} the resolved URL
 */
var resolveUrl$1 = function resolveUrl(baseUrl, relativeUrl) {
  // return early if we don't need to resolve
  if (/^[a-z]+:/i.test(relativeUrl)) {
    return relativeUrl;
  } // if baseUrl is a data URI, ignore it and resolve everything relative to window.location


  if (/^data:/.test(baseUrl)) {
    baseUrl = window.location && window.location.href || '';
  } // IE11 supports URL but not the URL constructor
  // feature detect the behavior we want


  var nativeURL = typeof window.URL === 'function';
  var protocolLess = /^\/\//.test(baseUrl); // remove location if window.location isn't available (i.e. we're in node)
  // and if baseUrl isn't an absolute url

  var removeLocation = !window.location && !/\/\//i.test(baseUrl); // if the base URL is relative then combine with the current location

  if (nativeURL) {
    baseUrl = new window.URL(baseUrl, window.location || DEFAULT_LOCATION);
  } else if (!/\/\//i.test(baseUrl)) {
    // legacy path: make the base absolute with url-toolkit instead
    baseUrl = urlToolkit.buildAbsoluteURL(window.location && window.location.href || '', baseUrl);
  }

  if (nativeURL) {
    var newUrl = new URL(relativeUrl, baseUrl); // if we're a protocol-less url, remove the protocol
    // and if we're location-less, remove the location
    // otherwise, return the url unmodified

    if (removeLocation) {
      // strip the DEFAULT_LOCATION placeholder that was injected above
      return newUrl.href.slice(DEFAULT_LOCATION.length);
    } else if (protocolLess) {
      return newUrl.href.slice(newUrl.protocol.length);
    }

    return newUrl.href;
  }

  // no native URL constructor: fall back to url-toolkit resolution
  return urlToolkit.buildAbsoluteURL(baseUrl, relativeUrl);
};

/**
 * @file resolve-url.js - Handling how URLs are resolved and manipulated
 */
var resolveUrl = resolveUrl$1;
/**
 * Checks whether xhr request was redirected and returns correct url depending
 * on `handleManifestRedirects` option
 *
 * @api private
 *
 * @param {string} url - an url being requested
 * @param {XMLHttpRequest} req - xhr request result
 *
 * @return {string}
 */
var resolveManifestRedirect = function resolveManifestRedirect(handleManifestRedirect, url, req) {
  // To understand how the responseURL below is set and generated:
  // - https://fetch.spec.whatwg.org/#concept-response-url
  // - https://fetch.spec.whatwg.org/#atomic-http-redirect-handling
  var followedRedirect = Boolean(handleManifestRedirect && req && req.responseURL && url !== req.responseURL);

  return followedRedirect ? req.responseURL : url;
};
305
// Builds a namespaced debug logger for a VHS subsystem. When videojs exposes
// log.debug the returned function forwards to it prefixed with
// "VHS: <source> >"; otherwise logging is a no-op.
var logger = function logger(source) {
  var vjs = videojs__default["default"];

  if (!vjs.log.debug) {
    return function () {};
  }

  return vjs.log.debug.bind(vjs, 'VHS:', source + " >");
};
313
// Babel helper (bundled as a CommonJS module): shallow-merges the own
// enumerable properties of each source object onto `target`, preferring the
// native Object.assign when available.
var _extends_1 = createCommonjsModule(function (module) {
  function _extends() {
    // Memoize on first call: rebind both module.exports and the local name to
    // the best available implementation, then delegate via apply below.
    module.exports = _extends = Object.assign || function (target) {
      for (var i = 1; i < arguments.length; i++) {
        var source = arguments[i];

        for (var key in source) {
          // copy own properties only, skipping anything inherited
          if (Object.prototype.hasOwnProperty.call(source, key)) {
            target[key] = source[key];
          }
        }
      }

      return target;
    };

    // Re-attach the fake ES-module markers after the reassignment above.
    module.exports["default"] = module.exports, module.exports.__esModule = true;
    return _extends.apply(this, arguments);
  }

  module.exports = _extends;
  module.exports["default"] = module.exports, module.exports.__esModule = true;
});
337
338 /**
339 * @file stream.js
340 */
341
342 /**
343 * A lightweight readable stream implemention that handles event dispatching.
344 *
345 * @class Stream
346 */
var Stream = /*#__PURE__*/function () {
  function Stream() {
    // event name -> array of listener functions
    this.listeners = {};
  }

  var _proto = Stream.prototype;

  /**
   * Add a listener for a specified event type.
   *
   * @param {string} type the event name
   * @param {Function} listener the callback to be invoked when an event of
   * the specified type occurs
   */
  _proto.on = function on(type, listener) {
    var registered = this.listeners[type];

    if (!registered) {
      registered = this.listeners[type] = [];
    }

    registered.push(listener);
  };

  /**
   * Remove a listener for a specified event type.
   *
   * @param {string} type the event name
   * @param {Function} listener a function previously registered for this
   * type of event through `on`
   * @return {boolean} if we could turn it off or not
   */
  _proto.off = function off(type, listener) {
    var registered = this.listeners[type];

    if (!registered) {
      return false;
    }

    var index = registered.indexOf(listener);
    // Copy-on-write: swap in a fresh array rather than mutating the one a
    // running trigger() may still be iterating, so its loop keeps a stable
    // reference and ordering.
    var remaining = registered.slice(0);
    remaining.splice(index, 1);
    this.listeners[type] = remaining;
    return index > -1;
  };

  /**
   * Trigger an event of the specified type on this stream. Any additional
   * arguments to this function are passed as parameters to event listeners.
   *
   * @param {string} type the event name
   */
  _proto.trigger = function trigger(type) {
    var callbacks = this.listeners[type];

    if (!callbacks) {
      return;
    }

    // Fast path: a single callback argument is by far the most common case,
    // so avoid materializing an intermediate args array for it.
    if (arguments.length === 2) {
      for (var i = 0, n = callbacks.length; i < n; ++i) {
        callbacks[i].call(this, arguments[1]);
      }

      return;
    }

    var args = Array.prototype.slice.call(arguments, 1);

    for (var j = 0, m = callbacks.length; j < m; ++j) {
      callbacks[j].apply(this, args);
    }
  };

  /**
   * Destroys the stream and cleans up.
   */
  _proto.dispose = function dispose() {
    this.listeners = {};
  };

  /**
   * Forwards all `data` events on this stream to the destination stream. The
   * destination stream should provide a method `push` to receive the data
   * events as they arrive.
   *
   * @param {Stream} destination the stream that will receive all `data` events
   * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
   */
  _proto.pipe = function pipe(destination) {
    this.on('data', function (data) {
      destination.push(data);
    });
  };

  return Stream;
}();
457
// Decodes a base64 string into a binary string. Prefers the browser's native
// window.atob and falls back to Node's Buffer. The `typeof window` guard
// avoids the ReferenceError the previous `window.atob ? ...` check threw in
// window-less environments (where the Buffer fallback was intended to run).
var atob = function atob(s) {
  if (typeof window !== 'undefined' && window.atob) {
    return window.atob(s);
  }

  return Buffer.from(s, 'base64').toString('binary');
};
461
/**
 * Decodes a base64 string into a Uint8Array containing the raw byte values.
 *
 * @param {string} b64Text base64-encoded input
 * @return {Uint8Array} one element per decoded byte
 */
function decodeB64ToUint8Array(b64Text) {
  var binary = atob(b64Text);
  var byteCount = binary.length;
  var bytes = new Uint8Array(byteCount);

  for (var idx = 0; idx < byteCount; idx++) {
    bytes[idx] = binary.charCodeAt(idx);
  }

  return bytes;
}
472
473 /*! @name m3u8-parser @version 4.7.1 @license Apache-2.0 */
474 /**
475 * A stream that buffers string input and generates a `data` event for each
476 * line.
477 *
478 * @class LineStream
479 * @extends Stream
480 */
481
var LineStream = /*#__PURE__*/function (_Stream) {
  inheritsLoose(LineStream, _Stream);

  function LineStream() {
    var _this = _Stream.call(this) || this;

    // holds any trailing partial line until its newline arrives
    _this.buffer = '';
    return _this;
  }

  var _proto = LineStream.prototype;

  /**
   * Add new data to be parsed.
   *
   * @param {string} data the text to process
   */
  _proto.push = function push(data) {
    this.buffer += data;

    var nextNewline = this.buffer.indexOf('\n');

    while (nextNewline > -1) {
      // emit everything up to (but not including) the newline, drop the
      // consumed prefix, and look for the next line
      this.trigger('data', this.buffer.substring(0, nextNewline));
      this.buffer = this.buffer.substring(nextNewline + 1);
      nextNewline = this.buffer.indexOf('\n');
    }
  };

  return LineStream;
}(Stream);
514
var TAB = String.fromCharCode(0x09);

/**
 * Parses an HLS byterange string ("<length>@<offset>") into an object.
 * Either side of the `@` may be absent; absent parts are simply omitted
 * from the result.
 *
 * @param {string} byterangeString the raw byterange attribute value
 * @return {Object} object with optional numeric `length` and `offset`
 */
var parseByterange = function parseByterange(byterangeString) {
  // optionally capture digits before and after the `@`
  var match = /([0-9.]*)?@?([0-9.]*)?/.exec(byterangeString || '');
  var result = {};
  var rawLength = match[1];
  var rawOffset = match[2];

  if (rawLength) {
    result.length = parseInt(rawLength, 10);
  }

  if (rawOffset) {
    result.offset = parseInt(rawOffset, 10);
  }

  return result;
};
533 /**
534 * "forgiving" attribute list psuedo-grammar:
535 * attributes -> keyvalue (',' keyvalue)*
536 * keyvalue -> key '=' value
537 * key -> [^=]*
538 * value -> '"' [^"]* '"' | [^,]*
539 */
540
541
/**
 * Builds the RegExp used to split an M3U8 attribute list on top-level
 * key=value boundaries. Per the "forgiving" grammar documented above, values
 * may be quoted (and may then contain commas) or bare (and may not).
 *
 * @return {RegExp} a fresh separator RegExp with a single capture group
 */
var attributeSeparator = function attributeSeparator() {
  var keyvalue = '(?:[^=]*)=(?:"[^"]*"|[^,]*)';
  return new RegExp('(?:^|,)(' + keyvalue + ')');
};
548 /**
549 * Parse attributes from a line given the separator
550 *
551 * @param {string} attributes the attribute line to parse
552 */
553
554
/**
 * Parse attributes from a line given the separator
 *
 * @param {string} attributes the attribute line to parse
 * @return {Object} map of attribute keys to trimmed, unquoted values
 */
var parseAttributes$1 = function parseAttributes(attributes) {
  // splitting on the separator leaves matched key=value pairs interleaved
  // with empty filler strings
  var pieces = attributes.split(attributeSeparator());
  var result = {};

  // walk from the end so that, when a key appears more than once, the
  // earliest occurrence wins (same as the original reverse iteration)
  for (var i = pieces.length - 1; i >= 0; i--) {
    if (pieces[i] === '') {
      // skip the unmatched filler portions of the split
      continue;
    }

    var pair = /([^=]*)=(.*)/.exec(pieces[i]).slice(1);
    var key = pair[0].replace(/^\s+|\s+$/g, '');
    var value = pair[1].replace(/^\s+|\s+$/g, '');
    // strip one pair of optional surrounding quotes
    value = value.replace(/^['"](.*)['"]$/g, '$1');
    result[key] = value;
  }

  return result;
};
579 /**
580 * A line-level M3U8 parser event stream. It expects to receive input one
581 * line at a time and performs a context-free parse of its contents. A stream
582 * interpretation of a manifest can be useful if the manifest is expected to
583 * be too large to fit comfortably into memory or the entirety of the input
584 * is not immediately available. Otherwise, it's probably much easier to work
585 * with a regular `Parser` object.
586 *
587 * Produces `data` events with an object that captures the parser's
588 * interpretation of the input. That object has a property `tag` that is one
589 * of `uri`, `comment`, or `tag`. URIs only have a single additional
590 * property, `line`, which captures the entirety of the input without
591 * interpretation. Comments similarly have a single additional property
592 * `text` which is the input without the leading `#`.
593 *
594 * Tags always have a property `tagType` which is the lower-cased version of
595 * the M3U8 directive without the `#EXT` or `#EXT-X-` prefix. For instance,
596 * `#EXT-X-MEDIA-SEQUENCE` becomes `media-sequence` when parsed. Unrecognized
597 * tags are given the tag type `unknown` and a single additional property
598 * `data` with the remainder of the input.
599 *
600 * @class ParseStream
601 * @extends Stream
602 */
603
604
605 var ParseStream = /*#__PURE__*/function (_Stream) {
606 inheritsLoose(ParseStream, _Stream);
607
608 function ParseStream() {
609 var _this;
610
611 _this = _Stream.call(this) || this;
612 _this.customParsers = [];
613 _this.tagMappers = [];
614 return _this;
615 }
616 /**
617 * Parses an additional line of input.
618 *
619 * @param {string} line a single line of an M3U8 file to parse
620 */
621
622
623 var _proto = ParseStream.prototype;
624
625 _proto.push = function push(line) {
626 var _this2 = this;
627
628 var match;
629 var event; // strip whitespace
630
631 line = line.trim();
632
633 if (line.length === 0) {
634 // ignore empty lines
635 return;
636 } // URIs
637
638
639 if (line[0] !== '#') {
640 this.trigger('data', {
641 type: 'uri',
642 uri: line
643 });
644 return;
645 } // map tags
646
647
648 var newLines = this.tagMappers.reduce(function (acc, mapper) {
649 var mappedLine = mapper(line); // skip if unchanged
650
651 if (mappedLine === line) {
652 return acc;
653 }
654
655 return acc.concat([mappedLine]);
656 }, [line]);
657 newLines.forEach(function (newLine) {
658 for (var i = 0; i < _this2.customParsers.length; i++) {
659 if (_this2.customParsers[i].call(_this2, newLine)) {
660 return;
661 }
662 } // Comments
663
664
665 if (newLine.indexOf('#EXT') !== 0) {
666 _this2.trigger('data', {
667 type: 'comment',
668 text: newLine.slice(1)
669 });
670
671 return;
672 } // strip off any carriage returns here so the regex matching
673 // doesn't have to account for them.
674
675
676 newLine = newLine.replace('\r', ''); // Tags
677
678 match = /^#EXTM3U/.exec(newLine);
679
680 if (match) {
681 _this2.trigger('data', {
682 type: 'tag',
683 tagType: 'm3u'
684 });
685
686 return;
687 }
688
689 match = /^#EXTINF:?([0-9\.]*)?,?(.*)?$/.exec(newLine);
690
691 if (match) {
692 event = {
693 type: 'tag',
694 tagType: 'inf'
695 };
696
697 if (match[1]) {
698 event.duration = parseFloat(match[1]);
699 }
700
701 if (match[2]) {
702 event.title = match[2];
703 }
704
705 _this2.trigger('data', event);
706
707 return;
708 }
709
710 match = /^#EXT-X-TARGETDURATION:?([0-9.]*)?/.exec(newLine);
711
712 if (match) {
713 event = {
714 type: 'tag',
715 tagType: 'targetduration'
716 };
717
718 if (match[1]) {
719 event.duration = parseInt(match[1], 10);
720 }
721
722 _this2.trigger('data', event);
723
724 return;
725 }
726
727 match = /^#EXT-X-VERSION:?([0-9.]*)?/.exec(newLine);
728
729 if (match) {
730 event = {
731 type: 'tag',
732 tagType: 'version'
733 };
734
735 if (match[1]) {
736 event.version = parseInt(match[1], 10);
737 }
738
739 _this2.trigger('data', event);
740
741 return;
742 }
743
744 match = /^#EXT-X-MEDIA-SEQUENCE:?(\-?[0-9.]*)?/.exec(newLine);
745
746 if (match) {
747 event = {
748 type: 'tag',
749 tagType: 'media-sequence'
750 };
751
752 if (match[1]) {
753 event.number = parseInt(match[1], 10);
754 }
755
756 _this2.trigger('data', event);
757
758 return;
759 }
760
761 match = /^#EXT-X-DISCONTINUITY-SEQUENCE:?(\-?[0-9.]*)?/.exec(newLine);
762
763 if (match) {
764 event = {
765 type: 'tag',
766 tagType: 'discontinuity-sequence'
767 };
768
769 if (match[1]) {
770 event.number = parseInt(match[1], 10);
771 }
772
773 _this2.trigger('data', event);
774
775 return;
776 }
777
778 match = /^#EXT-X-PLAYLIST-TYPE:?(.*)?$/.exec(newLine);
779
780 if (match) {
781 event = {
782 type: 'tag',
783 tagType: 'playlist-type'
784 };
785
786 if (match[1]) {
787 event.playlistType = match[1];
788 }
789
790 _this2.trigger('data', event);
791
792 return;
793 }
794
795 match = /^#EXT-X-BYTERANGE:?(.*)?$/.exec(newLine);
796
797 if (match) {
798 event = _extends_1(parseByterange(match[1]), {
799 type: 'tag',
800 tagType: 'byterange'
801 });
802
803 _this2.trigger('data', event);
804
805 return;
806 }
807
808 match = /^#EXT-X-ALLOW-CACHE:?(YES|NO)?/.exec(newLine);
809
810 if (match) {
811 event = {
812 type: 'tag',
813 tagType: 'allow-cache'
814 };
815
816 if (match[1]) {
817 event.allowed = !/NO/.test(match[1]);
818 }
819
820 _this2.trigger('data', event);
821
822 return;
823 }
824
825 match = /^#EXT-X-MAP:?(.*)$/.exec(newLine);
826
827 if (match) {
828 event = {
829 type: 'tag',
830 tagType: 'map'
831 };
832
833 if (match[1]) {
834 var attributes = parseAttributes$1(match[1]);
835
836 if (attributes.URI) {
837 event.uri = attributes.URI;
838 }
839
840 if (attributes.BYTERANGE) {
841 event.byterange = parseByterange(attributes.BYTERANGE);
842 }
843 }
844
845 _this2.trigger('data', event);
846
847 return;
848 }
849
850 match = /^#EXT-X-STREAM-INF:?(.*)$/.exec(newLine);
851
852 if (match) {
853 event = {
854 type: 'tag',
855 tagType: 'stream-inf'
856 };
857
858 if (match[1]) {
859 event.attributes = parseAttributes$1(match[1]);
860
861 if (event.attributes.RESOLUTION) {
862 var split = event.attributes.RESOLUTION.split('x');
863 var resolution = {};
864
865 if (split[0]) {
866 resolution.width = parseInt(split[0], 10);
867 }
868
869 if (split[1]) {
870 resolution.height = parseInt(split[1], 10);
871 }
872
873 event.attributes.RESOLUTION = resolution;
874 }
875
876 if (event.attributes.BANDWIDTH) {
877 event.attributes.BANDWIDTH = parseInt(event.attributes.BANDWIDTH, 10);
878 }
879
880 if (event.attributes['PROGRAM-ID']) {
881 event.attributes['PROGRAM-ID'] = parseInt(event.attributes['PROGRAM-ID'], 10);
882 }
883 }
884
885 _this2.trigger('data', event);
886
887 return;
888 }
889
890 match = /^#EXT-X-MEDIA:?(.*)$/.exec(newLine);
891
892 if (match) {
893 event = {
894 type: 'tag',
895 tagType: 'media'
896 };
897
898 if (match[1]) {
899 event.attributes = parseAttributes$1(match[1]);
900 }
901
902 _this2.trigger('data', event);
903
904 return;
905 }
906
907 match = /^#EXT-X-ENDLIST/.exec(newLine);
908
909 if (match) {
910 _this2.trigger('data', {
911 type: 'tag',
912 tagType: 'endlist'
913 });
914
915 return;
916 }
917
918 match = /^#EXT-X-DISCONTINUITY/.exec(newLine);
919
920 if (match) {
921 _this2.trigger('data', {
922 type: 'tag',
923 tagType: 'discontinuity'
924 });
925
926 return;
927 }
928
929 match = /^#EXT-X-PROGRAM-DATE-TIME:?(.*)$/.exec(newLine);
930
931 if (match) {
932 event = {
933 type: 'tag',
934 tagType: 'program-date-time'
935 };
936
937 if (match[1]) {
938 event.dateTimeString = match[1];
939 event.dateTimeObject = new Date(match[1]);
940 }
941
942 _this2.trigger('data', event);
943
944 return;
945 }
946
947 match = /^#EXT-X-KEY:?(.*)$/.exec(newLine);
948
949 if (match) {
950 event = {
951 type: 'tag',
952 tagType: 'key'
953 };
954
955 if (match[1]) {
956 event.attributes = parseAttributes$1(match[1]); // parse the IV string into a Uint32Array
957
958 if (event.attributes.IV) {
959 if (event.attributes.IV.substring(0, 2).toLowerCase() === '0x') {
960 event.attributes.IV = event.attributes.IV.substring(2);
961 }
962
963 event.attributes.IV = event.attributes.IV.match(/.{8}/g);
964 event.attributes.IV[0] = parseInt(event.attributes.IV[0], 16);
965 event.attributes.IV[1] = parseInt(event.attributes.IV[1], 16);
966 event.attributes.IV[2] = parseInt(event.attributes.IV[2], 16);
967 event.attributes.IV[3] = parseInt(event.attributes.IV[3], 16);
968 event.attributes.IV = new Uint32Array(event.attributes.IV);
969 }
970 }
971
972 _this2.trigger('data', event);
973
974 return;
975 }
976
977 match = /^#EXT-X-START:?(.*)$/.exec(newLine);
978
979 if (match) {
980 event = {
981 type: 'tag',
982 tagType: 'start'
983 };
984
985 if (match[1]) {
986 event.attributes = parseAttributes$1(match[1]);
987 event.attributes['TIME-OFFSET'] = parseFloat(event.attributes['TIME-OFFSET']);
988 event.attributes.PRECISE = /YES/.test(event.attributes.PRECISE);
989 }
990
991 _this2.trigger('data', event);
992
993 return;
994 }
995
996 match = /^#EXT-X-CUE-OUT-CONT:?(.*)?$/.exec(newLine);
997
998 if (match) {
999 event = {
1000 type: 'tag',
1001 tagType: 'cue-out-cont'
1002 };
1003
1004 if (match[1]) {
1005 event.data = match[1];
1006 } else {
1007 event.data = '';
1008 }
1009
1010 _this2.trigger('data', event);
1011
1012 return;
1013 }
1014
1015 match = /^#EXT-X-CUE-OUT:?(.*)?$/.exec(newLine);
1016
1017 if (match) {
1018 event = {
1019 type: 'tag',
1020 tagType: 'cue-out'
1021 };
1022
1023 if (match[1]) {
1024 event.data = match[1];
1025 } else {
1026 event.data = '';
1027 }
1028
1029 _this2.trigger('data', event);
1030
1031 return;
1032 }
1033
1034 match = /^#EXT-X-CUE-IN:?(.*)?$/.exec(newLine);
1035
1036 if (match) {
1037 event = {
1038 type: 'tag',
1039 tagType: 'cue-in'
1040 };
1041
1042 if (match[1]) {
1043 event.data = match[1];
1044 } else {
1045 event.data = '';
1046 }
1047
1048 _this2.trigger('data', event);
1049
1050 return;
1051 }
1052
1053 match = /^#EXT-X-SKIP:(.*)$/.exec(newLine);
1054
1055 if (match && match[1]) {
1056 event = {
1057 type: 'tag',
1058 tagType: 'skip'
1059 };
1060 event.attributes = parseAttributes$1(match[1]);
1061
1062 if (event.attributes.hasOwnProperty('SKIPPED-SEGMENTS')) {
1063 event.attributes['SKIPPED-SEGMENTS'] = parseInt(event.attributes['SKIPPED-SEGMENTS'], 10);
1064 }
1065
1066 if (event.attributes.hasOwnProperty('RECENTLY-REMOVED-DATERANGES')) {
1067 event.attributes['RECENTLY-REMOVED-DATERANGES'] = event.attributes['RECENTLY-REMOVED-DATERANGES'].split(TAB);
1068 }
1069
1070 _this2.trigger('data', event);
1071
1072 return;
1073 }
1074
1075 match = /^#EXT-X-PART:(.*)$/.exec(newLine);
1076
1077 if (match && match[1]) {
1078 event = {
1079 type: 'tag',
1080 tagType: 'part'
1081 };
1082 event.attributes = parseAttributes$1(match[1]);
1083 ['DURATION'].forEach(function (key) {
1084 if (event.attributes.hasOwnProperty(key)) {
1085 event.attributes[key] = parseFloat(event.attributes[key]);
1086 }
1087 });
1088 ['INDEPENDENT', 'GAP'].forEach(function (key) {
1089 if (event.attributes.hasOwnProperty(key)) {
1090 event.attributes[key] = /YES/.test(event.attributes[key]);
1091 }
1092 });
1093
1094 if (event.attributes.hasOwnProperty('BYTERANGE')) {
1095 event.attributes.byterange = parseByterange(event.attributes.BYTERANGE);
1096 }
1097
1098 _this2.trigger('data', event);
1099
1100 return;
1101 }
1102
1103 match = /^#EXT-X-SERVER-CONTROL:(.*)$/.exec(newLine);
1104
1105 if (match && match[1]) {
1106 event = {
1107 type: 'tag',
1108 tagType: 'server-control'
1109 };
1110 event.attributes = parseAttributes$1(match[1]);
1111 ['CAN-SKIP-UNTIL', 'PART-HOLD-BACK', 'HOLD-BACK'].forEach(function (key) {
1112 if (event.attributes.hasOwnProperty(key)) {
1113 event.attributes[key] = parseFloat(event.attributes[key]);
1114 }
1115 });
1116 ['CAN-SKIP-DATERANGES', 'CAN-BLOCK-RELOAD'].forEach(function (key) {
1117 if (event.attributes.hasOwnProperty(key)) {
1118 event.attributes[key] = /YES/.test(event.attributes[key]);
1119 }
1120 });
1121
1122 _this2.trigger('data', event);
1123
1124 return;
1125 }
1126
1127 match = /^#EXT-X-PART-INF:(.*)$/.exec(newLine);
1128
1129 if (match && match[1]) {
1130 event = {
1131 type: 'tag',
1132 tagType: 'part-inf'
1133 };
1134 event.attributes = parseAttributes$1(match[1]);
1135 ['PART-TARGET'].forEach(function (key) {
1136 if (event.attributes.hasOwnProperty(key)) {
1137 event.attributes[key] = parseFloat(event.attributes[key]);
1138 }
1139 });
1140
1141 _this2.trigger('data', event);
1142
1143 return;
1144 }
1145
1146 match = /^#EXT-X-PRELOAD-HINT:(.*)$/.exec(newLine);
1147
1148 if (match && match[1]) {
1149 event = {
1150 type: 'tag',
1151 tagType: 'preload-hint'
1152 };
1153 event.attributes = parseAttributes$1(match[1]);
1154 ['BYTERANGE-START', 'BYTERANGE-LENGTH'].forEach(function (key) {
1155 if (event.attributes.hasOwnProperty(key)) {
1156 event.attributes[key] = parseInt(event.attributes[key], 10);
1157 var subkey = key === 'BYTERANGE-LENGTH' ? 'length' : 'offset';
1158 event.attributes.byterange = event.attributes.byterange || {};
1159 event.attributes.byterange[subkey] = event.attributes[key]; // only keep the parsed byterange object.
1160
1161 delete event.attributes[key];
1162 }
1163 });
1164
1165 _this2.trigger('data', event);
1166
1167 return;
1168 }
1169
1170 match = /^#EXT-X-RENDITION-REPORT:(.*)$/.exec(newLine);
1171
1172 if (match && match[1]) {
1173 event = {
1174 type: 'tag',
1175 tagType: 'rendition-report'
1176 };
1177 event.attributes = parseAttributes$1(match[1]);
1178 ['LAST-MSN', 'LAST-PART'].forEach(function (key) {
1179 if (event.attributes.hasOwnProperty(key)) {
1180 event.attributes[key] = parseInt(event.attributes[key], 10);
1181 }
1182 });
1183
1184 _this2.trigger('data', event);
1185
1186 return;
1187 } // unknown tag type
1188
1189
1190 _this2.trigger('data', {
1191 type: 'tag',
1192 data: newLine.slice(4)
1193 });
1194 });
1195 }
1196 /**
1197 * Add a parser for custom headers
1198 *
1199 * @param {Object} options a map of options for the added parser
1200 * @param {RegExp} options.expression a regular expression to match the custom header
1201 * @param {string} options.customType the custom type to register to the output
1202 * @param {Function} [options.dataParser] function to parse the line into an object
1203 * @param {boolean} [options.segment] should tag data be attached to the segment object
1204 */
1205 ;
1206
/**
 * Add a parser for custom headers. Lines matching `expression` are emitted
 * as `{type: 'custom'}` data events instead of being treated as tags.
 *
 * @param {Object} options a map of options for the added parser
 * @param {RegExp} options.expression a regular expression to match the custom header
 * @param {string} options.customType the custom type to register to the output
 * @param {Function} [options.dataParser] function to parse the line into an object
 * @param {boolean} [options.segment] should tag data be attached to the segment object
 */
_proto.addParser = function addParser(_ref) {
  var parser = this;
  var expression = _ref.expression,
      customType = _ref.customType,
      dataParser = _ref.dataParser,
      segment = _ref.segment;

  // fall back to an identity parser when none was supplied
  var parseData = typeof dataParser === 'function' ? dataParser : function (line) {
    return line;
  };

  this.customParsers.push(function (line) {
    if (!expression.exec(line)) {
      // returning undefined lets the default tag handling run
      return;
    }

    parser.trigger('data', {
      type: 'custom',
      data: parseData(line),
      customType: customType,
      segment: segment
    });

    // signal to the caller that this line was consumed
    return true;
  });
}
1236 /**
1237 * Add a custom header mapper
1238 *
1239 * @param {Object} options
1240 * @param {RegExp} options.expression a regular expression to match the custom header
1241 * @param {Function} options.map function to translate tag into a different tag
1242 */
1243 ;
1244
/**
 * Add a custom header mapper. Lines matching `expression` are rewritten by
 * `map` before normal tag parsing; all other lines pass through untouched.
 *
 * @param {Object} options
 * @param {RegExp} options.expression a regular expression to match the custom header
 * @param {Function} options.map function to translate tag into a different tag
 */
_proto.addTagMapper = function addTagMapper(_ref2) {
  var expression = _ref2.expression,
      map = _ref2.map;

  this.tagMappers.push(function (line) {
    return expression.test(line) ? map(line) : line;
  });
};
1259
1260 return ParseStream;
1261 }(Stream);
1262
// Convert an HLS attribute name such as `CAN-SKIP-UNTIL` into its
// camelCase form (`canSkipUntil`).
var camelCase = function camelCase(str) {
  return str.toLowerCase().replace(/-(\w)/g, function (match, firstChar) {
    // uppercase the character captured after each dash
    return firstChar.toUpperCase();
  });
};
1268
// Produce a shallow copy of `attributes` with every key camelCased.
var camelCaseKeys = function camelCaseKeys(attributes) {
  return Object.keys(attributes).reduce(function (result, key) {
    result[camelCase(key)] = attributes[key];
    return result;
  }, {});
}; // set SERVER-CONTROL hold back based upon targetDuration and partTargetDuration
1276 // we need this helper because defaults are based upon targetDuration and
1277 // partTargetDuration being set, but they may not be if SERVER-CONTROL appears before
1278 // target durations are set.
1279
1280
// Apply spec-mandated defaults and minimums to the SERVER-CONTROL hold-back
// values. Invoked with a parser instance as `this` (via .call) so that it can
// trigger info/warn events. No-op when the manifest has no serverControl.
var setHoldBack = function setHoldBack(manifest) {
  var serverControl = manifest.serverControl;
  var targetDuration = manifest.targetDuration;
  var partTargetDuration = manifest.partTargetDuration;

  if (!serverControl) {
    return;
  }

  var tag = '#EXT-X-SERVER-CONTROL';
  var hb = 'holdBack';
  var phb = 'partHoldBack';
  var minTargetDuration = targetDuration && targetDuration * 3;
  var minPartDuration = partTargetDuration && partTargetDuration * 2;

  // HOLD-BACK defaults to three target durations when absent
  if (targetDuration && !serverControl.hasOwnProperty(hb)) {
    serverControl[hb] = minTargetDuration;
    this.trigger('info', {
      message: tag + " defaulting HOLD-BACK to targetDuration * 3 (" + minTargetDuration + ")."
    });
  }

  // HOLD-BACK may never be smaller than three target durations
  if (minTargetDuration && serverControl[hb] < minTargetDuration) {
    this.trigger('warn', {
      message: tag + " clamping HOLD-BACK (" + serverControl[hb] + ") to targetDuration * 3 (" + minTargetDuration + ")"
    });
    serverControl[hb] = minTargetDuration;
  }

  // PART-HOLD-BACK defaults to three part target durations when absent
  if (partTargetDuration && !serverControl.hasOwnProperty(phb)) {
    serverControl[phb] = partTargetDuration * 3;
    this.trigger('info', {
      message: tag + " defaulting PART-HOLD-BACK to partTargetDuration * 3 (" + serverControl[phb] + ")."
    });
  }

  // PART-HOLD-BACK may never be smaller than two part target durations
  if (partTargetDuration && serverControl[phb] < minPartDuration) {
    this.trigger('warn', {
      message: tag + " clamping PART-HOLD-BACK (" + serverControl[phb] + ") to partTargetDuration * 2 (" + minPartDuration + ")."
    });
    serverControl[phb] = minPartDuration;
  }
};
1326 /**
1327 * A parser for M3U8 files. The current interpretation of the input is
1328 * exposed as a property `manifest` on parser objects. It's just two lines to
1329 * create and parse a manifest once you have the contents available as a string:
1330 *
1331 * ```js
1332 * var parser = new m3u8.Parser();
1333 * parser.push(xhr.responseText);
1334 * ```
1335 *
1336 * New input can later be applied to update the manifest object by calling
1337 * `push` again.
1338 *
1339 * The parser attempts to create a usable manifest object even if the
1340 * underlying input is somewhat nonsensical. It emits `info` and `warning`
1341 * events during the parse if it encounters input that seems invalid or
1342 * requires some property of the manifest object to be defaulted.
1343 *
1344 * @class Parser
1345 * @extends Stream
1346 */
1347
1348
var Parser = /*#__PURE__*/function (_Stream) {
  inheritsLoose(Parser, _Stream);

  function Parser() {
    var _this;

    _this = _Stream.call(this) || this;
    _this.lineStream = new LineStream();
    _this.parseStream = new ParseStream();

    _this.lineStream.pipe(_this.parseStream);
    /* eslint-disable consistent-this */

    var self = assertThisInitialized(_this);
    /* eslint-enable consistent-this */

    var uris = [];
    var currentUri = {}; // if specified, the active EXT-X-MAP definition

    var currentMap; // if specified, the active decryption key

    var _key;

    var hasParts = false;

    var noop = function noop() {};

    var defaultMediaGroups = {
      'AUDIO': {},
      'VIDEO': {},
      'CLOSED-CAPTIONS': {},
      'SUBTITLES': {}
    }; // This is the Widevine UUID from DASH IF IOP. The same exact string is
    // used in MPDs with Widevine encrypted streams.

    var widevineUuid = 'urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed'; // group segments into numbered timelines delineated by discontinuities

    var currentTimeline = 0; // the manifest is empty until the parse stream begins delivering data

    _this.manifest = {
      allowCache: true,
      discontinuityStarts: [],
      segments: []
    }; // keep track of the last seen segment's byte range end, as segments are not required
    // to provide the offset, in which case it defaults to the next byte after the
    // previous segment

    var lastByterangeEnd = 0; // keep track of the last seen part's byte range end.

    var lastPartByterangeEnd = 0;

    _this.on('end', function () {
      // only add preloadSegment if we don't yet have a uri for it.
      // and we actually have parts/preloadHints
      if (currentUri.uri || !currentUri.parts && !currentUri.preloadHints) {
        return;
      }

      if (!currentUri.map && currentMap) {
        currentUri.map = currentMap;
      }

      if (!currentUri.key && _key) {
        currentUri.key = _key;
      }

      if (!currentUri.timeline && typeof currentTimeline === 'number') {
        currentUri.timeline = currentTimeline;
      }

      _this.manifest.preloadSegment = currentUri;
    }); // update the manifest with the m3u8 entry from the parse stream

    _this.parseStream.on('data', function (entry) {
      var mediaGroup;
      var rendition;
      // dispatch on entry.type ('tag' | 'uri' | 'comment' | 'custom'), and for
      // tags dispatch again on entry.tagType; unknown tag types are no-ops
      ({
        tag: function tag() {
          // switch based on the tag type
          (({
            version: function version() {
              if (entry.version) {
                this.manifest.version = entry.version;
              }
            },
            'allow-cache': function allowCache() {
              this.manifest.allowCache = entry.allowed;

              if (!('allowed' in entry)) {
                this.trigger('info', {
                  message: 'defaulting allowCache to YES'
                });
                this.manifest.allowCache = true;
              }
            },
            byterange: function byterange() {
              var byterange = {};

              if ('length' in entry) {
                currentUri.byterange = byterange;
                byterange.length = entry.length;

                if (!('offset' in entry)) {
                  /*
                   * From the latest spec (as of this writing):
                   * https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.2.2
                   *
                   * Same text since EXT-X-BYTERANGE's introduction in draft 7:
                   * https://tools.ietf.org/html/draft-pantos-http-live-streaming-07#section-3.3.1)
                   *
                   * "If o [offset] is not present, the sub-range begins at the next byte
                   * following the sub-range of the previous media segment."
                   */
                  entry.offset = lastByterangeEnd;
                }
              }

              if ('offset' in entry) {
                currentUri.byterange = byterange;
                byterange.offset = entry.offset;
              }

              lastByterangeEnd = byterange.offset + byterange.length;
            },
            endlist: function endlist() {
              this.manifest.endList = true;
            },
            inf: function inf() {
              if (!('mediaSequence' in this.manifest)) {
                this.manifest.mediaSequence = 0;
                this.trigger('info', {
                  message: 'defaulting media sequence to zero'
                });
              }

              if (!('discontinuitySequence' in this.manifest)) {
                this.manifest.discontinuitySequence = 0;
                this.trigger('info', {
                  message: 'defaulting discontinuity sequence to zero'
                });
              }

              if (entry.duration > 0) {
                currentUri.duration = entry.duration;
              }

              if (entry.duration === 0) {
                currentUri.duration = 0.01;
                this.trigger('info', {
                  message: 'updating zero segment duration to a small value'
                });
              }

              this.manifest.segments = uris;
            },
            key: function key() {
              if (!entry.attributes) {
                this.trigger('warn', {
                  message: 'ignoring key declaration without attribute list'
                });
                return;
              } // clear the active encryption key

              if (entry.attributes.METHOD === 'NONE') {
                _key = null;
                return;
              }

              if (!entry.attributes.URI) {
                this.trigger('warn', {
                  message: 'ignoring key declaration without URI'
                });
                return;
              }

              if (entry.attributes.KEYFORMAT === 'com.apple.streamingkeydelivery') {
                this.manifest.contentProtection = this.manifest.contentProtection || {}; // TODO: add full support for this.

                this.manifest.contentProtection['com.apple.fps.1_0'] = {
                  attributes: entry.attributes
                };
                return;
              }

              if (entry.attributes.KEYFORMAT === 'com.microsoft.playready') {
                this.manifest.contentProtection = this.manifest.contentProtection || {}; // TODO: add full support for this.

                this.manifest.contentProtection['com.microsoft.playready'] = {
                  uri: entry.attributes.URI
                };
                return;
              } // check if the content is encrypted for Widevine
              // Widevine/HLS spec: https://storage.googleapis.com/wvdocs/Widevine_DRM_HLS.pdf

              if (entry.attributes.KEYFORMAT === widevineUuid) {
                var VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR', 'SAMPLE-AES-CENC'];

                if (VALID_METHODS.indexOf(entry.attributes.METHOD) === -1) {
                  this.trigger('warn', {
                    message: 'invalid key method provided for Widevine'
                  });
                  return;
                }

                if (entry.attributes.METHOD === 'SAMPLE-AES-CENC') {
                  this.trigger('warn', {
                    message: 'SAMPLE-AES-CENC is deprecated, please use SAMPLE-AES-CTR instead'
                  });
                }

                if (entry.attributes.URI.substring(0, 23) !== 'data:text/plain;base64,') {
                  this.trigger('warn', {
                    message: 'invalid key URI provided for Widevine'
                  });
                  return;
                }

                if (!(entry.attributes.KEYID && entry.attributes.KEYID.substring(0, 2) === '0x')) {
                  this.trigger('warn', {
                    message: 'invalid key ID provided for Widevine'
                  });
                  return;
                } // if Widevine key attributes are valid, store them as `contentProtection`
                // on the manifest to emulate Widevine tag structure in a DASH mpd

                this.manifest.contentProtection = this.manifest.contentProtection || {};
                this.manifest.contentProtection['com.widevine.alpha'] = {
                  attributes: {
                    schemeIdUri: entry.attributes.KEYFORMAT,
                    // remove '0x' from the key id string
                    keyId: entry.attributes.KEYID.substring(2)
                  },
                  // decode the base64-encoded PSSH box
                  pssh: decodeB64ToUint8Array(entry.attributes.URI.split(',')[1])
                };
                return;
              }

              if (!entry.attributes.METHOD) {
                this.trigger('warn', {
                  message: 'defaulting key method to AES-128'
                });
              } // setup an encryption key for upcoming segments

              _key = {
                method: entry.attributes.METHOD || 'AES-128',
                uri: entry.attributes.URI
              };

              if (typeof entry.attributes.IV !== 'undefined') {
                _key.iv = entry.attributes.IV;
              }
            },
            'media-sequence': function mediaSequence() {
              if (!isFinite(entry.number)) {
                this.trigger('warn', {
                  message: 'ignoring invalid media sequence: ' + entry.number
                });
                return;
              }

              this.manifest.mediaSequence = entry.number;
            },
            'discontinuity-sequence': function discontinuitySequence() {
              if (!isFinite(entry.number)) {
                this.trigger('warn', {
                  message: 'ignoring invalid discontinuity sequence: ' + entry.number
                });
                return;
              }

              this.manifest.discontinuitySequence = entry.number;
              currentTimeline = entry.number;
            },
            'playlist-type': function playlistType() {
              if (!/VOD|EVENT/.test(entry.playlistType)) {
                this.trigger('warn', {
                  // fix: previously interpolated `entry.playlist`, which is never
                  // set on the entry, so the warning always printed `undefined`
                  message: 'ignoring unknown playlist type: ' + entry.playlistType
                });
                return;
              }

              this.manifest.playlistType = entry.playlistType;
            },
            map: function map() {
              currentMap = {};

              if (entry.uri) {
                currentMap.uri = entry.uri;
              }

              if (entry.byterange) {
                currentMap.byterange = entry.byterange;
              }

              if (_key) {
                currentMap.key = _key;
              }
            },
            'stream-inf': function streamInf() {
              this.manifest.playlists = uris;
              this.manifest.mediaGroups = this.manifest.mediaGroups || defaultMediaGroups;

              if (!entry.attributes) {
                this.trigger('warn', {
                  message: 'ignoring empty stream-inf attributes'
                });
                return;
              }

              if (!currentUri.attributes) {
                currentUri.attributes = {};
              }

              _extends_1(currentUri.attributes, entry.attributes);
            },
            media: function media() {
              this.manifest.mediaGroups = this.manifest.mediaGroups || defaultMediaGroups;

              if (!(entry.attributes && entry.attributes.TYPE && entry.attributes['GROUP-ID'] && entry.attributes.NAME)) {
                this.trigger('warn', {
                  message: 'ignoring incomplete or missing media group'
                });
                return;
              } // find the media group, creating defaults as necessary

              var mediaGroupType = this.manifest.mediaGroups[entry.attributes.TYPE];
              mediaGroupType[entry.attributes['GROUP-ID']] = mediaGroupType[entry.attributes['GROUP-ID']] || {};
              mediaGroup = mediaGroupType[entry.attributes['GROUP-ID']]; // collect the rendition metadata

              rendition = {
                default: /yes/i.test(entry.attributes.DEFAULT)
              };

              if (rendition.default) {
                rendition.autoselect = true;
              } else {
                rendition.autoselect = /yes/i.test(entry.attributes.AUTOSELECT);
              }

              if (entry.attributes.LANGUAGE) {
                rendition.language = entry.attributes.LANGUAGE;
              }

              if (entry.attributes.URI) {
                rendition.uri = entry.attributes.URI;
              }

              if (entry.attributes['INSTREAM-ID']) {
                rendition.instreamId = entry.attributes['INSTREAM-ID'];
              }

              if (entry.attributes.CHARACTERISTICS) {
                rendition.characteristics = entry.attributes.CHARACTERISTICS;
              }

              if (entry.attributes.FORCED) {
                rendition.forced = /yes/i.test(entry.attributes.FORCED);
              } // insert the new rendition

              mediaGroup[entry.attributes.NAME] = rendition;
            },
            discontinuity: function discontinuity() {
              currentTimeline += 1;
              currentUri.discontinuity = true;
              this.manifest.discontinuityStarts.push(uris.length);
            },
            'program-date-time': function programDateTime() {
              if (typeof this.manifest.dateTimeString === 'undefined') {
                // PROGRAM-DATE-TIME is a media-segment tag, but for backwards
                // compatibility, we add the first occurrence of the PROGRAM-DATE-TIME tag
                // to the manifest object
                // TODO: Consider removing this in future major version
                this.manifest.dateTimeString = entry.dateTimeString;
                this.manifest.dateTimeObject = entry.dateTimeObject;
              }

              currentUri.dateTimeString = entry.dateTimeString;
              currentUri.dateTimeObject = entry.dateTimeObject;
            },
            targetduration: function targetduration() {
              if (!isFinite(entry.duration) || entry.duration < 0) {
                this.trigger('warn', {
                  message: 'ignoring invalid target duration: ' + entry.duration
                });
                return;
              }

              this.manifest.targetDuration = entry.duration;
              setHoldBack.call(this, this.manifest);
            },
            start: function start() {
              if (!entry.attributes || isNaN(entry.attributes['TIME-OFFSET'])) {
                this.trigger('warn', {
                  message: 'ignoring start declaration without appropriate attribute list'
                });
                return;
              }

              this.manifest.start = {
                timeOffset: entry.attributes['TIME-OFFSET'],
                precise: entry.attributes.PRECISE
              };
            },
            'cue-out': function cueOut() {
              currentUri.cueOut = entry.data;
            },
            'cue-out-cont': function cueOutCont() {
              currentUri.cueOutCont = entry.data;
            },
            'cue-in': function cueIn() {
              currentUri.cueIn = entry.data;
            },
            'skip': function skip() {
              this.manifest.skip = camelCaseKeys(entry.attributes);
              this.warnOnMissingAttributes_('#EXT-X-SKIP', entry.attributes, ['SKIPPED-SEGMENTS']);
            },
            'part': function part() {
              var _this2 = this;

              hasParts = true; // parts are always specified before a segment

              var segmentIndex = this.manifest.segments.length;
              var part = camelCaseKeys(entry.attributes);
              currentUri.parts = currentUri.parts || [];
              currentUri.parts.push(part);

              if (part.byterange) {
                if (!part.byterange.hasOwnProperty('offset')) {
                  part.byterange.offset = lastPartByterangeEnd;
                }

                lastPartByterangeEnd = part.byterange.offset + part.byterange.length;
              }

              var partIndex = currentUri.parts.length - 1;
              this.warnOnMissingAttributes_("#EXT-X-PART #" + partIndex + " for segment #" + segmentIndex, entry.attributes, ['URI', 'DURATION']);

              if (this.manifest.renditionReports) {
                this.manifest.renditionReports.forEach(function (r, i) {
                  if (!r.hasOwnProperty('lastPart')) {
                    _this2.trigger('warn', {
                      message: "#EXT-X-RENDITION-REPORT #" + i + " lacks required attribute(s): LAST-PART"
                    });
                  }
                });
              }
            },
            'server-control': function serverControl() {
              var attrs = this.manifest.serverControl = camelCaseKeys(entry.attributes);

              if (!attrs.hasOwnProperty('canBlockReload')) {
                attrs.canBlockReload = false;
                this.trigger('info', {
                  message: '#EXT-X-SERVER-CONTROL defaulting CAN-BLOCK-RELOAD to false'
                });
              }

              setHoldBack.call(this, this.manifest);

              if (attrs.canSkipDateranges && !attrs.hasOwnProperty('canSkipUntil')) {
                this.trigger('warn', {
                  message: '#EXT-X-SERVER-CONTROL lacks required attribute CAN-SKIP-UNTIL which is required when CAN-SKIP-DATERANGES is set'
                });
              }
            },
            'preload-hint': function preloadHint() {
              // parts are always specified before a segment
              var segmentIndex = this.manifest.segments.length;
              var hint = camelCaseKeys(entry.attributes);
              var isPart = hint.type && hint.type === 'PART';
              currentUri.preloadHints = currentUri.preloadHints || [];
              currentUri.preloadHints.push(hint);

              if (hint.byterange) {
                if (!hint.byterange.hasOwnProperty('offset')) {
                  // use last part byterange end or zero if not a part.
                  hint.byterange.offset = isPart ? lastPartByterangeEnd : 0;

                  if (isPart) {
                    lastPartByterangeEnd = hint.byterange.offset + hint.byterange.length;
                  }
                }
              }

              var index = currentUri.preloadHints.length - 1;
              this.warnOnMissingAttributes_("#EXT-X-PRELOAD-HINT #" + index + " for segment #" + segmentIndex, entry.attributes, ['TYPE', 'URI']);

              if (!hint.type) {
                return;
              } // search through all preload hints except for the current one for
              // a duplicate type.

              for (var i = 0; i < currentUri.preloadHints.length - 1; i++) {
                var otherHint = currentUri.preloadHints[i];

                if (!otherHint.type) {
                  continue;
                }

                if (otherHint.type === hint.type) {
                  this.trigger('warn', {
                    message: "#EXT-X-PRELOAD-HINT #" + index + " for segment #" + segmentIndex + " has the same TYPE " + hint.type + " as preload hint #" + i
                  });
                }
              }
            },
            'rendition-report': function renditionReport() {
              var report = camelCaseKeys(entry.attributes);
              this.manifest.renditionReports = this.manifest.renditionReports || [];
              this.manifest.renditionReports.push(report);
              var index = this.manifest.renditionReports.length - 1;
              var required = ['LAST-MSN', 'URI'];

              if (hasParts) {
                required.push('LAST-PART');
              }

              this.warnOnMissingAttributes_("#EXT-X-RENDITION-REPORT #" + index, entry.attributes, required);
            },
            'part-inf': function partInf() {
              this.manifest.partInf = camelCaseKeys(entry.attributes);
              this.warnOnMissingAttributes_('#EXT-X-PART-INF', entry.attributes, ['PART-TARGET']);

              if (this.manifest.partInf.partTarget) {
                this.manifest.partTargetDuration = this.manifest.partInf.partTarget;
              }

              setHoldBack.call(this, this.manifest);
            }
          })[entry.tagType] || noop).call(self);
        },
        uri: function uri() {
          currentUri.uri = entry.uri;
          uris.push(currentUri); // if no explicit duration was declared, use the target duration

          if (this.manifest.targetDuration && !('duration' in currentUri)) {
            this.trigger('warn', {
              message: 'defaulting segment duration to the target duration'
            });
            currentUri.duration = this.manifest.targetDuration;
          } // annotate with encryption information, if necessary

          if (_key) {
            currentUri.key = _key;
          }

          currentUri.timeline = currentTimeline; // annotate with initialization segment information, if necessary

          if (currentMap) {
            currentUri.map = currentMap;
          } // reset the last byterange end as it needs to be 0 between parts

          lastPartByterangeEnd = 0; // prepare for the next URI

          currentUri = {};
        },
        comment: function comment() {// comments are not important for playback
        },
        custom: function custom() {
          // if this is segment-level data attach the output to the segment
          if (entry.segment) {
            currentUri.custom = currentUri.custom || {};
            currentUri.custom[entry.customType] = entry.data; // if this is manifest-level data attach to the top level manifest object
          } else {
            this.manifest.custom = this.manifest.custom || {};
            this.manifest.custom[entry.customType] = entry.data;
          }
        }
      })[entry.type].call(self);
    });

    return _this;
  }

  var _proto = Parser.prototype;

  /**
   * Trigger a single `warn` event listing every attribute from `required`
   * that is missing from `attributes`.
   *
   * @param {string} identifier prefix for the warning message (e.g. the tag name)
   * @param {Object} attributes parsed attribute map to check
   * @param {string[]} required attribute names that must be present
   */
  _proto.warnOnMissingAttributes_ = function warnOnMissingAttributes_(identifier, attributes, required) {
    var missing = [];
    required.forEach(function (key) {
      if (!attributes.hasOwnProperty(key)) {
        missing.push(key);
      }
    });

    if (missing.length) {
      this.trigger('warn', {
        message: identifier + " lacks required attribute(s): " + missing.join(', ')
      });
    }
  };
  /**
   * Parse the input string and update the manifest object.
   *
   * @param {string} chunk a potentially incomplete portion of the manifest
   */

  _proto.push = function push(chunk) {
    this.lineStream.push(chunk);
  };
  /**
   * Flush any remaining input. This can be handy if the last line of an M3U8
   * manifest did not contain a trailing newline but the file has been
   * completely received.
   */

  _proto.end = function end() {
    // flush any buffered input
    this.lineStream.push('\n');
    this.trigger('end');
  };
  /**
   * Add an additional parser for non-standard tags
   *
   * @param {Object} options a map of options for the added parser
   * @param {RegExp} options.expression a regular expression to match the custom header
   * @param {string} options.type the type to register to the output
   * @param {Function} [options.dataParser] function to parse the line into an object
   * @param {boolean} [options.segment] should tag data be attached to the segment object
   */

  _proto.addParser = function addParser(options) {
    this.parseStream.addParser(options);
  };
  /**
   * Add a custom header mapper
   *
   * @param {Object} options
   * @param {RegExp} options.expression a regular expression to match the custom header
   * @param {Function} options.map function to translate tag into a different tag
   */

  _proto.addTagMapper = function addTagMapper(options) {
    this.parseStream.addTagMapper(options);
  };

  return Parser;
}(Stream);
2003
// Codec-classification regular expressions, applied to lowercased,
// trimmed codec strings (e.g. entries from a CODECS attribute).
var regexs = {
  // to determine mime types (which container a codec can be carried in)
  mp4: /^(av0?1|avc0?[1234]|vp0?9|flac|opus|mp3|mp4a|mp4v|stpp.ttml.im1t)/,
  webm: /^(vp0?[89]|av0?1|opus|vorbis)/,
  ogg: /^(vp0?[89]|theora|flac|opus|vorbis)/,
  // to determine if a codec is audio or video
  video: /^(av0?1|avc0?[1234]|vp0?[89]|hvc1|hev1|theora|mp4v)/,
  audio: /^(mp4a|flac|vorbis|opus|ac-[34]|ec-3|alac|mp3|speex|aac)/,
  text: /^(stpp.ttml.im1t)/,
  // mux.js support regex
  muxerVideo: /^(avc0?1)/,
  muxerAudio: /^(mp4a)/,
  // match nothing as muxer does not support text right now.
  // there can never be a character before the start of a string,
  // so this matches nothing.
  muxerText: /a^/
};
// media type names as used as keys of `regexs` (lowercase) and, title-cased,
// as suffixes of the muxer regex keys above
var mediaTypes = ['video', 'audio', 'text'];
var upperMediaTypes = ['Video', 'Audio', 'Text'];
2023 /**
2024 * Replace the old apple-style `avc1.<dd>.<dd>` codec string with the standard
2025 * `avc1.<hhhhhh>`
2026 *
2027 * @param {string} codec
2028 * Codec string to translate
2029 * @return {string}
2030 * The translated codec string
2031 */
2032
// Replace the old apple-style `avc1.<dd>.<dd>` codec string with the
// standard `avc1.<hhhhhh>` form; all other inputs pass through unchanged
// (including falsy ones, which are returned as-is).
var translateLegacyCodec = function translateLegacyCodec(codec) {
  if (!codec) {
    return codec;
  }

  return codec.replace(/avc1\.(\d+)\.(\d+)/i, function (orig, profile, avcLevel) {
    // render a decimal value as a zero-padded two-digit hex byte
    var toHexByte = function (value) {
      return ('00' + Number(value).toString(16)).slice(-2);
    };

    return 'avc1.' + toHexByte(profile) + '00' + toHexByte(avcLevel);
  });
};
2044 /**
2045 * @typedef {Object} ParsedCodecInfo
2046 * @property {number} codecCount
2047 * Number of codecs parsed
2048 * @property {string} [videoCodec]
2049 * Parsed video codec (if found)
2050 * @property {string} [videoObjectTypeIndicator]
2051 * Video object type indicator (if found)
2052 * @property {string|null} audioProfile
2053 * Audio profile
2054 */
2055
2056 /**
2057 * Parses a codec string to retrieve the number of codecs specified, the video codec and
2058 * object type indicator, and the audio profile.
2059 *
2060 * @param {string} [codecString]
2061 * The codec string to parse
2062 * @return {ParsedCodecInfo}
2063 * Parsed codec info
2064 */
2065
/**
 * Parses a codec string (comma-separated CODECS attribute value) into a list
 * of `{type, details, mediaType}` records. A codec that matches no known
 * media-type regex is reported with mediaType 'unknown'.
 *
 * @param {string} [codecString]
 *        The codec string to parse
 * @return {Array}
 *         One entry per recognized (or unknown) codec
 */
var parseCodecs = function parseCodecs(codecString) {
  if (codecString === void 0) {
    codecString = '';
  }

  var result = [];
  codecString.split(',').forEach(function (codec) {
    codec = codec.trim();
    var matched = false;
    mediaTypes.forEach(function (name) {
      var match = regexs[name].exec(codec.toLowerCase());

      if (!match || match.length <= 1) {
        return;
      }

      matched = true; // maintain codec case

      var type = codec.substring(0, match[1].length);
      result.push({
        type: type,
        details: codec.replace(type, ''),
        mediaType: name
      });
    });

    if (!matched) {
      result.push({
        type: codec,
        details: '',
        mediaType: 'unknown'
      });
    }
  });
  return result;
};
2104 /**
2105 * Returns a ParsedCodecInfo object for the default alternate audio playlist if there is
2106 * a default alternate audio playlist for the provided audio group.
2107 *
2108 * @param {Object} master
2109 * The master playlist
2110 * @param {string} audioGroupId
2111 * ID of the audio group for which to find the default codec info
2112 * @return {ParsedCodecInfo}
2113 * Parsed codec info
2114 */
2115
/**
 * Returns parsed codec info for the default alternate audio rendition of the
 * given audio group, or null when the group (or a default with playlists)
 * cannot be found.
 *
 * @param {Object} master
 *        The master playlist
 * @param {string} audioGroupId
 *        ID of the audio group for which to find the default codec info
 * @return {Object|null}
 *         Parsed codec info, or null
 */
var codecsFromDefault = function codecsFromDefault(master, audioGroupId) {
  if (!master.mediaGroups.AUDIO || !audioGroupId) {
    return null;
  }

  var audioGroup = master.mediaGroups.AUDIO[audioGroupId];

  if (!audioGroup) {
    return null;
  }

  var names = Object.keys(audioGroup);

  for (var i = 0; i < names.length; i++) {
    var audioType = audioGroup[names[i]];

    // codec should be the same for all playlists within the audio type
    if (audioType.default && audioType.playlists) {
      return parseCodecs(audioType.playlists[0].attributes.CODECS);
    }
  }

  return null;
};
// Whether the (possibly padded, mixed-case) codec string names an audio codec.
var isAudioCodec = function isAudioCodec(codec) {
  if (codec === void 0) {
    codec = '';
  }

  var normalized = codec.trim().toLowerCase();
  return regexs.audio.test(normalized);
};
// Whether the (possibly padded, mixed-case) codec string names a text codec.
var isTextCodec = function isTextCodec(codec) {
  if (codec === void 0) {
    codec = '';
  }

  var normalized = codec.trim().toLowerCase();
  return regexs.text.test(normalized);
};
/**
 * Build a MIME type (e.g. `video/mp4;codecs="..."`) for the given codec
 * string, choosing the top-level type from the codecs present and the
 * container from whichever one every codec fits into.
 * Returns undefined for a missing/non-string input.
 */
var getMimeForCodec = function getMimeForCodec(codecString) {
  if (!codecString || typeof codecString !== 'string') {
    return;
  }

  var codecs = codecString.toLowerCase().split(',').map(function (c) {
    return translateLegacyCodec(c.trim());
  });

  // default to video type; a lone audio or text codec changes it
  var type = 'video';

  if (codecs.length === 1) {
    if (isAudioCodec(codecs[0])) {
      type = 'audio';
    } else if (isTextCodec(codecs[0])) {
      // text uses application/<container> for now
      type = 'application';
    }
  }

  var allMatch = function (regex) {
    return codecs.every(function (c) {
      return regex.test(c);
    });
  };

  // every codec must be able to go into the container for that container to
  // be the correct one; default the container to mp4
  var container = 'mp4';

  if (allMatch(regexs.mp4)) {
    container = 'mp4';
  } else if (allMatch(regexs.webm)) {
    container = 'webm';
  } else if (allMatch(regexs.ogg)) {
    container = 'ogg';
  }

  return type + "/" + container + ";codecs=\"" + codecString + "\"";
};
// Report whether the browser's MediaSource implementation supports the given
// codec string; returns false (never undefined) when MediaSource or
// isTypeSupported is unavailable.
var browserSupportsCodec = function browserSupportsCodec(codecString) {
  if (codecString === void 0) {
    codecString = '';
  }

  var ms = window.MediaSource;
  return ms && ms.isTypeSupported && ms.isTypeSupported(getMimeForCodec(codecString)) || false;
};
2198 var muxerSupportsCodec = function muxerSupportsCodec(codecString) {
2199 if (codecString === void 0) {
2200 codecString = '';
2201 }
2202
2203 return codecString.toLowerCase().split(',').every(function (codec) {
2204 codec = codec.trim(); // any match is supported.
2205
2206 for (var i = 0; i < upperMediaTypes.length; i++) {
2207 var type = upperMediaTypes[i];
2208
2209 if (regexs["muxer" + type].test(codec)) {
2210 return true;
2211 }
2212 }
2213
2214 return false;
2215 });
2216 };
  // Fallback codec strings used when a rendition does not declare CODECS.
  var DEFAULT_AUDIO_CODEC = 'mp4a.40.2';
  var DEFAULT_VIDEO_CODEC = 'avc1.4d400d';

  /**
   * ranges
   *
   * Utilities for working with TimeRanges.
   *
   */

  var TIME_FUDGE_FACTOR = 1 / 30; // Comparisons between time values such as current time and the end of the buffered range
  // can be misleading because of precision differences or when the current media has poorly
  // aligned audio and video, which can cause values to be slightly off from what you would
  // expect. This value is what we consider to be safe to use in such comparisons to account
  // for these scenarios.

  // Three fudge factors: the widest tolerance used by range comparisons below.
  var SAFE_TIME_DELTA = TIME_FUDGE_FACTOR * 3;
2234
2235 var filterRanges = function filterRanges(timeRanges, predicate) {
2236 var results = [];
2237 var i;
2238
2239 if (timeRanges && timeRanges.length) {
2240 // Search for ranges that match the predicate
2241 for (i = 0; i < timeRanges.length; i++) {
2242 if (predicate(timeRanges.start(i), timeRanges.end(i))) {
2243 results.push([timeRanges.start(i), timeRanges.end(i)]);
2244 }
2245 }
2246 }
2247
2248 return videojs__default["default"].createTimeRanges(results);
2249 };
2250 /**
2251 * Attempts to find the buffered TimeRange that contains the specified
2252 * time.
2253 *
2254 * @param {TimeRanges} buffered - the TimeRanges object to query
2255 * @param {number} time - the time to filter on.
2256 * @return {TimeRanges} a new TimeRanges object
2257 */
2258
2259
2260 var findRange = function findRange(buffered, time) {
2261 return filterRanges(buffered, function (start, end) {
2262 return start - SAFE_TIME_DELTA <= time && end + SAFE_TIME_DELTA >= time;
2263 });
2264 };
2265 /**
2266 * Returns the TimeRanges that begin later than the specified time.
2267 *
2268 * @param {TimeRanges} timeRanges - the TimeRanges object to query
2269 * @param {number} time - the time to filter on.
2270 * @return {TimeRanges} a new TimeRanges object.
2271 */
2272
2273 var findNextRange = function findNextRange(timeRanges, time) {
2274 return filterRanges(timeRanges, function (start) {
2275 return start - TIME_FUDGE_FACTOR >= time;
2276 });
2277 };
2278 /**
2279 * Returns gaps within a list of TimeRanges
2280 *
2281 * @param {TimeRanges} buffered - the TimeRanges object
2282 * @return {TimeRanges} a TimeRanges object of gaps
2283 */
2284
2285 var findGaps = function findGaps(buffered) {
2286 if (buffered.length < 2) {
2287 return videojs__default["default"].createTimeRanges();
2288 }
2289
2290 var ranges = [];
2291
2292 for (var i = 1; i < buffered.length; i++) {
2293 var start = buffered.end(i - 1);
2294 var end = buffered.start(i);
2295 ranges.push([start, end]);
2296 }
2297
2298 return videojs__default["default"].createTimeRanges(ranges);
2299 };
  /**
   * Calculate the intersection of two TimeRanges
   *
   * Implemented as a boundary sweep: all start/end boundaries from both
   * buffers are sorted, then walked in order while tracking how many
   * ranges are currently "open" (arity). Whenever two ranges are open at
   * once — one from each buffer, since ranges within a single TimeRanges
   * object do not overlap — the sweep is inside the intersection.
   *
   * @param {TimeRanges} bufferA
   * @param {TimeRanges} bufferB
   * @return {TimeRanges} The interesection of `bufferA` with `bufferB`
   */

  var bufferIntersection = function bufferIntersection(bufferA, bufferB) {
    var start = null; // start of the overlap currently being recorded
    var end = null; // end of the overlap currently being recorded
    var arity = 0; // number of ranges open at the sweep position
    var extents = []; // every start/end boundary from both buffers
    var ranges = []; // accumulated [start, end] intersection pairs

    // no intersection is possible unless both buffers have content
    if (!bufferA || !bufferA.length || !bufferB || !bufferB.length) {
      return videojs__default["default"].createTimeRange();
    } // Handle the case where we have both buffers and create an
    // intersection of the two


    var count = bufferA.length; // A) Gather up all start and end times

    while (count--) {
      extents.push({
        time: bufferA.start(count),
        type: 'start'
      });
      extents.push({
        time: bufferA.end(count),
        type: 'end'
      });
    }

    count = bufferB.length;

    while (count--) {
      extents.push({
        time: bufferB.start(count),
        type: 'start'
      });
      extents.push({
        time: bufferB.end(count),
        type: 'end'
      });
    } // B) Sort them by time


    extents.sort(function (a, b) {
      return a.time - b.time;
    }); // C) Go along one by one incrementing arity for start and decrementing
    // arity for ends

    for (count = 0; count < extents.length; count++) {
      if (extents[count].type === 'start') {
        arity++; // D) If arity is ever incremented to 2 we are entering an
        // overlapping range

        if (arity === 2) {
          start = extents[count].time;
        }
      } else if (extents[count].type === 'end') {
        arity--; // E) If arity is ever decremented to 1 we leaving an
        // overlapping range

        if (arity === 1) {
          end = extents[count].time;
        }
      } // F) Record overlapping ranges


      if (start !== null && end !== null) {
        ranges.push([start, end]);
        start = null;
        end = null;
      }
    }

    return videojs__default["default"].createTimeRanges(ranges);
  };
2380 /**
2381 * Gets a human readable string for a TimeRange
2382 *
2383 * @param {TimeRange} range
2384 * @return {string} a human readable string
2385 */
2386
2387 var printableRange = function printableRange(range) {
2388 var strArr = [];
2389
2390 if (!range || !range.length) {
2391 return '';
2392 }
2393
2394 for (var i = 0; i < range.length; i++) {
2395 strArr.push(range.start(i) + ' => ' + range.end(i));
2396 }
2397
2398 return strArr.join(', ');
2399 };
2400 /**
2401 * Calculates the amount of time left in seconds until the player hits the end of the
2402 * buffer and causes a rebuffer
2403 *
2404 * @param {TimeRange} buffered
2405 * The state of the buffer
2406 * @param {Numnber} currentTime
2407 * The current time of the player
2408 * @param {number} playbackRate
2409 * The current playback rate of the player. Defaults to 1.
2410 * @return {number}
2411 * Time until the player has to start rebuffering in seconds.
2412 * @function timeUntilRebuffer
2413 */
2414
2415 var timeUntilRebuffer = function timeUntilRebuffer(buffered, currentTime, playbackRate) {
2416 if (playbackRate === void 0) {
2417 playbackRate = 1;
2418 }
2419
2420 var bufferedEnd = buffered.length ? buffered.end(buffered.length - 1) : 0;
2421 return (bufferedEnd - currentTime) / playbackRate;
2422 };
2423 /**
2424 * Converts a TimeRanges object into an array representation
2425 *
2426 * @param {TimeRanges} timeRanges
2427 * @return {Array}
2428 */
2429
2430 var timeRangesToArray = function timeRangesToArray(timeRanges) {
2431 var timeRangesList = [];
2432
2433 for (var i = 0; i < timeRanges.length; i++) {
2434 timeRangesList.push({
2435 start: timeRanges.start(i),
2436 end: timeRanges.end(i)
2437 });
2438 }
2439
2440 return timeRangesList;
2441 };
2442 /**
2443 * Determines if two time range objects are different.
2444 *
2445 * @param {TimeRange} a
2446 * the first time range object to check
2447 *
2448 * @param {TimeRange} b
2449 * the second time range object to check
2450 *
2451 * @return {Boolean}
2452 * Whether the time range objects differ
2453 */
2454
2455 var isRangeDifferent = function isRangeDifferent(a, b) {
2456 // same object
2457 if (a === b) {
2458 return false;
2459 } // one or the other is undefined
2460
2461
2462 if (!a && b || !b && a) {
2463 return true;
2464 } // length is different
2465
2466
2467 if (a.length !== b.length) {
2468 return true;
2469 } // see if any start/end pair is different
2470
2471
2472 for (var i = 0; i < a.length; i++) {
2473 if (a.start(i) !== b.start(i) || a.end(i) !== b.end(i)) {
2474 return true;
2475 }
2476 } // if the length and every pair is the same
2477 // this is the same time range
2478
2479
2480 return false;
2481 };
2482 var lastBufferedEnd = function lastBufferedEnd(a) {
2483 if (!a || !a.length || !a.end) {
2484 return;
2485 }
2486
2487 return a.end(a.length - 1);
2488 };
2489 /**
2490 * A utility function to add up the amount of time in a timeRange
2491 * after a specified startTime.
2492 * ie:[[0, 10], [20, 40], [50, 60]] with a startTime 0
2493 * would return 40 as there are 40s seconds after 0 in the timeRange
2494 *
2495 * @param {TimeRange} range
2496 * The range to check against
2497 * @param {number} startTime
2498 * The time in the time range that you should start counting from
2499 *
2500 * @return {number}
2501 * The number of seconds in the buffer passed the specified time.
2502 */
2503
2504 var timeAheadOf = function timeAheadOf(range, startTime) {
2505 var time = 0;
2506
2507 if (!range || !range.length) {
2508 return time;
2509 }
2510
2511 for (var i = 0; i < range.length; i++) {
2512 var start = range.start(i);
2513 var end = range.end(i); // startTime is after this range entirely
2514
2515 if (startTime > end) {
2516 continue;
2517 } // startTime is within this range
2518
2519
2520 if (startTime > start && startTime <= end) {
2521 time += end - startTime;
2522 continue;
2523 } // startTime is before this range.
2524
2525
2526 time += end - start;
2527 }
2528
2529 return time;
2530 };
2531
  /**
   * @file playlist.js
   *
   * Playlist related utilities.
   */

  // local alias for video.js's TimeRange factory
  var createTimeRange = videojs__default["default"].createTimeRange;
2538 /**
2539 * Get the duration of a segment, with special cases for
2540 * llhls segments that do not have a duration yet.
2541 *
2542 * @param {Object} playlist
2543 * the playlist that the segment belongs to.
2544 * @param {Object} segment
2545 * the segment to get a duration for.
2546 *
2547 * @return {number}
2548 * the segment duration
2549 */
2550
2551 var segmentDurationWithParts = function segmentDurationWithParts(playlist, segment) {
2552 // if this isn't a preload segment
2553 // then we will have a segment duration that is accurate.
2554 if (!segment.preload) {
2555 return segment.duration;
2556 } // otherwise we have to add up parts and preload hints
2557 // to get an up to date duration.
2558
2559
2560 var result = 0;
2561 (segment.parts || []).forEach(function (p) {
2562 result += p.duration;
2563 }); // for preload hints we have to use partTargetDuration
2564 // as they won't even have a duration yet.
2565
2566 (segment.preloadHints || []).forEach(function (p) {
2567 if (p.type === 'PART') {
2568 result += playlist.partTargetDuration;
2569 }
2570 });
2571 return result;
2572 };
2573 /**
2574 * A function to get a combined list of parts and segments with durations
2575 * and indexes.
2576 *
2577 * @param {Playlist} playlist the playlist to get the list for.
2578 *
2579 * @return {Array} The part/segment list.
2580 */
2581
2582 var getPartsAndSegments = function getPartsAndSegments(playlist) {
2583 return (playlist.segments || []).reduce(function (acc, segment, si) {
2584 if (segment.parts) {
2585 segment.parts.forEach(function (part, pi) {
2586 acc.push({
2587 duration: part.duration,
2588 segmentIndex: si,
2589 partIndex: pi,
2590 part: part,
2591 segment: segment
2592 });
2593 });
2594 } else {
2595 acc.push({
2596 duration: segment.duration,
2597 segmentIndex: si,
2598 partIndex: null,
2599 segment: segment,
2600 part: null
2601 });
2602 }
2603
2604 return acc;
2605 }, []);
2606 };
2607 var getLastParts = function getLastParts(media) {
2608 var lastSegment = media.segments && media.segments.length && media.segments[media.segments.length - 1];
2609 return lastSegment && lastSegment.parts || [];
2610 };
2611 var getKnownPartCount = function getKnownPartCount(_ref) {
2612 var preloadSegment = _ref.preloadSegment;
2613
2614 if (!preloadSegment) {
2615 return;
2616 }
2617
2618 var parts = preloadSegment.parts,
2619 preloadHints = preloadSegment.preloadHints;
2620 var partCount = (preloadHints || []).reduce(function (count, hint) {
2621 return count + (hint.type === 'PART' ? 1 : 0);
2622 }, 0);
2623 partCount += parts && parts.length ? parts.length : 0;
2624 return partCount;
2625 };
2626 /**
2627 * Get the number of seconds to delay from the end of a
2628 * live playlist.
2629 *
2630 * @param {Playlist} master the master playlist
2631 * @param {Playlist} media the media playlist
2632 * @return {number} the hold back in seconds.
2633 */
2634
2635 var liveEdgeDelay = function liveEdgeDelay(master, media) {
2636 if (media.endList) {
2637 return 0;
2638 } // dash suggestedPresentationDelay trumps everything
2639
2640
2641 if (master && master.suggestedPresentationDelay) {
2642 return master.suggestedPresentationDelay;
2643 }
2644
2645 var hasParts = getLastParts(media).length > 0; // look for "part" delays from ll-hls first
2646
2647 if (hasParts && media.serverControl && media.serverControl.partHoldBack) {
2648 return media.serverControl.partHoldBack;
2649 } else if (hasParts && media.partTargetDuration) {
2650 return media.partTargetDuration * 3; // finally look for full segment delays
2651 } else if (media.serverControl && media.serverControl.holdBack) {
2652 return media.serverControl.holdBack;
2653 } else if (media.targetDuration) {
2654 return media.targetDuration * 3;
2655 }
2656
2657 return 0;
2658 };
  /**
   * walk backward until we find a duration we can use
   * or return a failure
   *
   * Starting at the segment for `endSequence`, walks toward the front of
   * the playlist accumulating segment durations until a segment carrying
   * precise timeline information (`start` or `end`) is found.
   *
   * @param {Playlist} playlist the playlist to walk through
   * @param {Number} endSequence the mediaSequence to stop walking on
   * @return {{result: number, precise: boolean}} the time estimate and
   *         whether it was anchored to precise segment timing
   */

  var backwardDuration = function backwardDuration(playlist, endSequence) {
    var result = 0;
    var i = endSequence - playlist.mediaSequence; // if a start time is available for segment immediately following
    // the interval, use it

    var segment = playlist.segments[i]; // Walk backward until we find the latest segment with timeline
    // information that is earlier than endSequence

    if (segment) {
      if (typeof segment.start !== 'undefined') {
        return {
          result: segment.start,
          precise: true
        };
      }

      if (typeof segment.end !== 'undefined') {
        return {
          result: segment.end - segment.duration,
          precise: true
        };
      }
    }

    while (i--) {
      segment = playlist.segments[i];

      // a known end time anchors the walk without this segment's duration
      if (typeof segment.end !== 'undefined') {
        return {
          result: result + segment.end,
          precise: true
        };
      }

      result += segmentDurationWithParts(playlist, segment);

      // a known start time anchors the walk including this segment's duration
      if (typeof segment.start !== 'undefined') {
        return {
          result: result + segment.start,
          precise: true
        };
      }
    }

    // no precise timing found; result is the sum of estimated durations
    return {
      result: result,
      precise: false
    };
  };
2716 /**
2717 * walk forward until we find a duration we can use
2718 * or return a failure
2719 *
2720 * @param {Playlist} playlist the playlist to walk through
2721 * @param {number} endSequence the mediaSequence to stop walking on
2722 */
2723
2724
2725 var forwardDuration = function forwardDuration(playlist, endSequence) {
2726 var result = 0;
2727 var segment;
2728 var i = endSequence - playlist.mediaSequence; // Walk forward until we find the earliest segment with timeline
2729 // information
2730
2731 for (; i < playlist.segments.length; i++) {
2732 segment = playlist.segments[i];
2733
2734 if (typeof segment.start !== 'undefined') {
2735 return {
2736 result: segment.start - result,
2737 precise: true
2738 };
2739 }
2740
2741 result += segmentDurationWithParts(playlist, segment);
2742
2743 if (typeof segment.end !== 'undefined') {
2744 return {
2745 result: segment.end - result,
2746 precise: true
2747 };
2748 }
2749 } // indicate we didn't find a useful duration estimate
2750
2751
2752 return {
2753 result: -1,
2754 precise: false
2755 };
2756 };
2757 /**
2758 * Calculate the media duration from the segments associated with a
2759 * playlist. The duration of a subinterval of the available segments
2760 * may be calculated by specifying an end index.
2761 *
2762 * @param {Object} playlist a media playlist object
2763 * @param {number=} endSequence an exclusive upper boundary
2764 * for the playlist. Defaults to playlist length.
2765 * @param {number} expired the amount of time that has dropped
2766 * off the front of the playlist in a live scenario
2767 * @return {number} the duration between the first available segment
2768 * and end index.
2769 */
2770
2771
2772 var intervalDuration = function intervalDuration(playlist, endSequence, expired) {
2773 if (typeof endSequence === 'undefined') {
2774 endSequence = playlist.mediaSequence + playlist.segments.length;
2775 }
2776
2777 if (endSequence < playlist.mediaSequence) {
2778 return 0;
2779 } // do a backward walk to estimate the duration
2780
2781
2782 var backward = backwardDuration(playlist, endSequence);
2783
2784 if (backward.precise) {
2785 // if we were able to base our duration estimate on timing
2786 // information provided directly from the Media Source, return
2787 // it
2788 return backward.result;
2789 } // walk forward to see if a precise duration estimate can be made
2790 // that way
2791
2792
2793 var forward = forwardDuration(playlist, endSequence);
2794
2795 if (forward.precise) {
2796 // we found a segment that has been buffered and so it's
2797 // position is known precisely
2798 return forward.result;
2799 } // return the less-precise, playlist-based duration estimate
2800
2801
2802 return backward.result + expired;
2803 };
2804 /**
2805 * Calculates the duration of a playlist. If a start and end index
2806 * are specified, the duration will be for the subset of the media
2807 * timeline between those two indices. The total duration for live
2808 * playlists is always Infinity.
2809 *
2810 * @param {Object} playlist a media playlist object
2811 * @param {number=} endSequence an exclusive upper
2812 * boundary for the playlist. Defaults to the playlist media
2813 * sequence number plus its length.
2814 * @param {number=} expired the amount of time that has
2815 * dropped off the front of the playlist in a live scenario
2816 * @return {number} the duration between the start index and end
2817 * index.
2818 */
2819
2820
2821 var duration = function duration(playlist, endSequence, expired) {
2822 if (!playlist) {
2823 return 0;
2824 }
2825
2826 if (typeof expired !== 'number') {
2827 expired = 0;
2828 } // if a slice of the total duration is not requested, use
2829 // playlist-level duration indicators when they're present
2830
2831
2832 if (typeof endSequence === 'undefined') {
2833 // if present, use the duration specified in the playlist
2834 if (playlist.totalDuration) {
2835 return playlist.totalDuration;
2836 } // duration should be Infinity for live playlists
2837
2838
2839 if (!playlist.endList) {
2840 return window.Infinity;
2841 }
2842 } // calculate the total duration based on the segment durations
2843
2844
2845 return intervalDuration(playlist, endSequence, expired);
2846 };
2847 /**
2848 * Calculate the time between two indexes in the current playlist
2849 * neight the start- nor the end-index need to be within the current
2850 * playlist in which case, the targetDuration of the playlist is used
2851 * to approximate the durations of the segments
2852 *
2853 * @param {Array} options.durationList list to iterate over for durations.
2854 * @param {number} options.defaultDuration duration to use for elements before or after the durationList
2855 * @param {number} options.startIndex partsAndSegments index to start
2856 * @param {number} options.endIndex partsAndSegments index to end.
2857 * @return {number} the number of seconds between startIndex and endIndex
2858 */
2859
2860 var sumDurations = function sumDurations(_ref2) {
2861 var defaultDuration = _ref2.defaultDuration,
2862 durationList = _ref2.durationList,
2863 startIndex = _ref2.startIndex,
2864 endIndex = _ref2.endIndex;
2865 var durations = 0;
2866
2867 if (startIndex > endIndex) {
2868 var _ref3 = [endIndex, startIndex];
2869 startIndex = _ref3[0];
2870 endIndex = _ref3[1];
2871 }
2872
2873 if (startIndex < 0) {
2874 for (var i = startIndex; i < Math.min(0, endIndex); i++) {
2875 durations += defaultDuration;
2876 }
2877
2878 startIndex = 0;
2879 }
2880
2881 for (var _i = startIndex; _i < endIndex; _i++) {
2882 durations += durationList[_i].duration;
2883 }
2884
2885 return durations;
2886 };
2887 /**
2888 * Calculates the playlist end time
2889 *
2890 * @param {Object} playlist a media playlist object
2891 * @param {number=} expired the amount of time that has
2892 * dropped off the front of the playlist in a live scenario
2893 * @param {boolean|false} useSafeLiveEnd a boolean value indicating whether or not the
2894 * playlist end calculation should consider the safe live end
2895 * (truncate the playlist end by three segments). This is normally
2896 * used for calculating the end of the playlist's seekable range.
2897 * This takes into account the value of liveEdgePadding.
2898 * Setting liveEdgePadding to 0 is equivalent to setting this to false.
2899 * @param {number} liveEdgePadding a number indicating how far from the end of the playlist we should be in seconds.
2900 * If this is provided, it is used in the safe live end calculation.
2901 * Setting useSafeLiveEnd=false or liveEdgePadding=0 are equivalent.
2902 * Corresponds to suggestedPresentationDelay in DASH manifests.
2903 * @return {number} the end time of playlist
2904 * @function playlistEnd
2905 */
2906
2907 var playlistEnd = function playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding) {
2908 if (!playlist || !playlist.segments) {
2909 return null;
2910 }
2911
2912 if (playlist.endList) {
2913 return duration(playlist);
2914 }
2915
2916 if (expired === null) {
2917 return null;
2918 }
2919
2920 expired = expired || 0;
2921 var lastSegmentEndTime = intervalDuration(playlist, playlist.mediaSequence + playlist.segments.length, expired);
2922
2923 if (useSafeLiveEnd) {
2924 liveEdgePadding = typeof liveEdgePadding === 'number' ? liveEdgePadding : liveEdgeDelay(null, playlist);
2925 lastSegmentEndTime -= liveEdgePadding;
2926 } // don't return a time less than zero
2927
2928
2929 return Math.max(0, lastSegmentEndTime);
2930 };
2931 /**
2932 * Calculates the interval of time that is currently seekable in a
2933 * playlist. The returned time ranges are relative to the earliest
2934 * moment in the specified playlist that is still available. A full
2935 * seekable implementation for live streams would need to offset
2936 * these values by the duration of content that has expired from the
2937 * stream.
2938 *
2939 * @param {Object} playlist a media playlist object
2940 * dropped off the front of the playlist in a live scenario
2941 * @param {number=} expired the amount of time that has
2942 * dropped off the front of the playlist in a live scenario
2943 * @param {number} liveEdgePadding how far from the end of the playlist we should be in seconds.
2944 * Corresponds to suggestedPresentationDelay in DASH manifests.
2945 * @return {TimeRanges} the periods of time that are valid targets
2946 * for seeking
2947 */
2948
2949 var seekable = function seekable(playlist, expired, liveEdgePadding) {
2950 var useSafeLiveEnd = true;
2951 var seekableStart = expired || 0;
2952 var seekableEnd = playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding);
2953
2954 if (seekableEnd === null) {
2955 return createTimeRange();
2956 }
2957
2958 return createTimeRange(seekableStart, seekableEnd);
2959 };
  /**
   * Determine the index and estimated starting time of the segment that
   * contains a specified playback position in a media playlist.
   *
   * @param {Object} options.playlist the media playlist to query
   * @param {number} options.currentTime The number of seconds since the earliest
   * possible position to determine the containing segment for
   * @param {number} options.startTime the time when the segment/part starts
   * @param {number} options.startingSegmentIndex the segment index to start looking at.
   * @param {number?} [options.startingPartIndex] the part index to look at within the segment.
   *
   * @return {Object} an object with partIndex, segmentIndex, and startTime.
   */

  var getMediaInfoForTime = function getMediaInfoForTime(_ref4) {
    var playlist = _ref4.playlist,
        currentTime = _ref4.currentTime,
        startingSegmentIndex = _ref4.startingSegmentIndex,
        startingPartIndex = _ref4.startingPartIndex,
        startTime = _ref4.startTime,
        experimentalExactManifestTimings = _ref4.experimentalExactManifestTimings;
    // offset of currentTime relative to the reference entry's start time;
    // negative means the target is before the reference entry
    var time = currentTime - startTime;
    var partsAndSegments = getPartsAndSegments(playlist);
    var startIndex = 0;

    // find the flattened-list index of the reference segment (and part,
    // when a part index was supplied)
    for (var i = 0; i < partsAndSegments.length; i++) {
      var partAndSegment = partsAndSegments[i];

      if (startingSegmentIndex !== partAndSegment.segmentIndex) {
        continue;
      } // skip this if part index does not match.


      if (typeof startingPartIndex === 'number' && typeof partAndSegment.partIndex === 'number' && startingPartIndex !== partAndSegment.partIndex) {
        continue;
      }

      startIndex = i;
      break;
    }

    if (time < 0) {
      // Walk backward from startIndex in the playlist, adding durations
      // until we find a segment that contains `time` and return it
      if (startIndex > 0) {
        for (var _i2 = startIndex - 1; _i2 >= 0; _i2--) {
          var _partAndSegment = partsAndSegments[_i2];
          time += _partAndSegment.duration;

          // with exact timings only true containment counts; otherwise
          // allow up to TIME_FUDGE_FACTOR of slop at the boundary
          if (experimentalExactManifestTimings) {
            if (time < 0) {
              continue;
            }
          } else if (time + TIME_FUDGE_FACTOR <= 0) {
            continue;
          }

          return {
            partIndex: _partAndSegment.partIndex,
            segmentIndex: _partAndSegment.segmentIndex,
            startTime: startTime - sumDurations({
              defaultDuration: playlist.targetDuration,
              durationList: partsAndSegments,
              startIndex: startIndex,
              endIndex: _i2
            })
          };
        }
      } // We were unable to find a good segment within the playlist
      // so select the first segment


      return {
        partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
        segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
        startTime: currentTime
      };
    } // When startIndex is negative, we first walk forward to first segment
    // adding target durations. If we "run out of time" before getting to
    // the first segment, return the first segment
    // NOTE(review): startIndex is initialized to 0 above and only ever
    // assigned a non-negative loop index, so this branch appears
    // unreachable here — confirm before relying on it.


    if (startIndex < 0) {
      for (var _i3 = startIndex; _i3 < 0; _i3++) {
        time -= playlist.targetDuration;

        if (time < 0) {
          return {
            partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
            segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
            startTime: currentTime
          };
        }
      }

      startIndex = 0;
    } // Walk forward from startIndex in the playlist, subtracting durations
    // until we find a segment that contains `time` and return it


    for (var _i4 = startIndex; _i4 < partsAndSegments.length; _i4++) {
      var _partAndSegment2 = partsAndSegments[_i4];
      time -= _partAndSegment2.duration;

      // same containment test as the backward walk, mirrored
      if (experimentalExactManifestTimings) {
        if (time > 0) {
          continue;
        }
      } else if (time - TIME_FUDGE_FACTOR >= 0) {
        continue;
      }

      return {
        partIndex: _partAndSegment2.partIndex,
        segmentIndex: _partAndSegment2.segmentIndex,
        startTime: startTime + sumDurations({
          defaultDuration: playlist.targetDuration,
          durationList: partsAndSegments,
          startIndex: startIndex,
          endIndex: _i4
        })
      };
    } // We are out of possible candidates so load the last one...


    return {
      segmentIndex: partsAndSegments[partsAndSegments.length - 1].segmentIndex,
      partIndex: partsAndSegments[partsAndSegments.length - 1].partIndex,
      startTime: currentTime
    };
  };
3091 /**
3092 * Check whether the playlist is blacklisted or not.
3093 *
3094 * @param {Object} playlist the media playlist object
3095 * @return {boolean} whether the playlist is blacklisted or not
3096 * @function isBlacklisted
3097 */
3098
3099 var isBlacklisted = function isBlacklisted(playlist) {
3100 return playlist.excludeUntil && playlist.excludeUntil > Date.now();
3101 };
3102 /**
3103 * Check whether the playlist is compatible with current playback configuration or has
3104 * been blacklisted permanently for being incompatible.
3105 *
3106 * @param {Object} playlist the media playlist object
3107 * @return {boolean} whether the playlist is incompatible or not
3108 * @function isIncompatible
3109 */
3110
3111 var isIncompatible = function isIncompatible(playlist) {
3112 return playlist.excludeUntil && playlist.excludeUntil === Infinity;
3113 };
3114 /**
3115 * Check whether the playlist is enabled or not.
3116 *
3117 * @param {Object} playlist the media playlist object
3118 * @return {boolean} whether the playlist is enabled or not
3119 * @function isEnabled
3120 */
3121
3122 var isEnabled = function isEnabled(playlist) {
3123 var blacklisted = isBlacklisted(playlist);
3124 return !playlist.disabled && !blacklisted;
3125 };
  /**
   * Check whether the playlist has been manually disabled through the representations api.
   *
   * Returns the raw `disabled` flag (may be undefined when never set).
   *
   * @param {Object} playlist the media playlist object
   * @return {boolean} whether the playlist is disabled manually or not
   * @function isDisabled
   */

  var isDisabled = function isDisabled(playlist) {
    return playlist.disabled;
  };
3137 /**
3138 * Returns whether the current playlist is an AES encrypted HLS stream
3139 *
3140 * @return {boolean} true if it's an AES encrypted HLS stream
3141 */
3142
3143 var isAes = function isAes(media) {
3144 for (var i = 0; i < media.segments.length; i++) {
3145 if (media.segments[i].key) {
3146 return true;
3147 }
3148 }
3149
3150 return false;
3151 };
3152 /**
3153 * Checks if the playlist has a value for the specified attribute
3154 *
3155 * @param {string} attr
3156 * Attribute to check for
3157 * @param {Object} playlist
3158 * The media playlist object
3159 * @return {boolean}
3160 * Whether the playlist contains a value for the attribute or not
3161 * @function hasAttribute
3162 */
3163
3164 var hasAttribute = function hasAttribute(attr, playlist) {
3165 return playlist.attributes && playlist.attributes[attr];
3166 };
3167 /**
3168 * Estimates the time required to complete a segment download from the specified playlist
3169 *
3170 * @param {number} segmentDuration
3171 * Duration of requested segment
3172 * @param {number} bandwidth
3173 * Current measured bandwidth of the player
3174 * @param {Object} playlist
3175 * The media playlist object
3176 * @param {number=} bytesReceived
3177 * Number of bytes already received for the request. Defaults to 0
3178 * @return {number|NaN}
3179 * The estimated time to request the segment. NaN if bandwidth information for
3180 * the given playlist is unavailable
3181 * @function estimateSegmentRequestTime
3182 */
3183
3184 var estimateSegmentRequestTime = function estimateSegmentRequestTime(segmentDuration, bandwidth, playlist, bytesReceived) {
3185 if (bytesReceived === void 0) {
3186 bytesReceived = 0;
3187 }
3188
3189 if (!hasAttribute('BANDWIDTH', playlist)) {
3190 return NaN;
3191 }
3192
3193 var size = segmentDuration * playlist.attributes.BANDWIDTH;
3194 return (size - bytesReceived * 8) / bandwidth;
3195 };
3196 /*
3197 * Returns whether the current playlist is the lowest rendition
3198 *
3199 * @return {Boolean} true if on lowest rendition
3200 */
3201
3202 var isLowestEnabledRendition = function isLowestEnabledRendition(master, media) {
3203 if (master.playlists.length === 1) {
3204 return true;
3205 }
3206
3207 var currentBandwidth = media.attributes.BANDWIDTH || Number.MAX_VALUE;
3208 return master.playlists.filter(function (playlist) {
3209 if (!isEnabled(playlist)) {
3210 return false;
3211 }
3212
3213 return (playlist.attributes.BANDWIDTH || 0) < currentBandwidth;
3214 }).length === 0;
3215 };
3216 var playlistMatch = function playlistMatch(a, b) {
3217 // both playlits are null
3218 // or only one playlist is non-null
3219 // no match
3220 if (!a && !b || !a && b || a && !b) {
3221 return false;
3222 } // playlist objects are the same, match
3223
3224
3225 if (a === b) {
3226 return true;
3227 } // first try to use id as it should be the most
3228 // accurate
3229
3230
3231 if (a.id && b.id && a.id === b.id) {
3232 return true;
3233 } // next try to use reslovedUri as it should be the
3234 // second most accurate.
3235
3236
3237 if (a.resolvedUri && b.resolvedUri && a.resolvedUri === b.resolvedUri) {
3238 return true;
3239 } // finally try to use uri as it should be accurate
3240 // but might miss a few cases for relative uris
3241
3242
3243 if (a.uri && b.uri && a.uri === b.uri) {
3244 return true;
3245 }
3246
3247 return false;
3248 };
3249
3250 var someAudioVariant = function someAudioVariant(master, callback) {
3251 var AUDIO = master && master.mediaGroups && master.mediaGroups.AUDIO || {};
3252 var found = false;
3253
3254 for (var groupName in AUDIO) {
3255 for (var label in AUDIO[groupName]) {
3256 found = callback(AUDIO[groupName][label]);
3257
3258 if (found) {
3259 break;
3260 }
3261 }
3262
3263 if (found) {
3264 break;
3265 }
3266 }
3267
3268 return !!found;
3269 };
3270
  /**
   * Determines whether a master manifest describes an audio-only presentation.
   *
   * @param {Object} master
   *        The parsed master playlist object
   * @return {boolean}
   *         true when every main playlist is audio (by CODECS or by membership
   *         in an AUDIO media group), or when there are no main playlists but
   *         audio media group playlists exist
   * @function isAudioOnly
   */

  var isAudioOnly = function isAudioOnly(master) {
    // we are audio only if we have no main playlists but do
    // have media group playlists.
    if (!master || !master.playlists || !master.playlists.length) {
      // without audio variants or playlists this
      // is not an audio only master.
      var found = someAudioVariant(master, function (variant) {
        return variant.playlists && variant.playlists.length || variant.uri;
      });
      return found;
    } // if every playlist has only an audio codec it is audio only


    // NOTE: _loop/_ret is the down-leveled form of a loop body containing
    // closures; "continue" advances the loop and {v: false} makes the outer
    // function return false.
    var _loop = function _loop(i) {
      var playlist = master.playlists[i];
      var CODECS = playlist.attributes && playlist.attributes.CODECS; // all codecs are audio, this is an audio playlist.

      if (CODECS && CODECS.split(',').every(function (c) {
        return isAudioCodec(c);
      })) {
        return "continue";
      } // playlist is in an audio group it is audio only


      var found = someAudioVariant(master, function (variant) {
        return playlistMatch(playlist, variant);
      });

      if (found) {
        return "continue";
      } // if we make it here this playlist isn't audio and we
      // are not audio only


      return {
        v: false
      };
    };

    for (var i = 0; i < master.playlists.length; i++) {
      var _ret = _loop(i);

      if (_ret === "continue") continue;
      if (typeof _ret === "object") return _ret.v;
    } // if we make it past every playlist without returning, then
    // this is an audio only playlist.


    return true;
  }; // exports
3321
  // namespace object collecting the playlist utilities defined above so they
  // can be exposed as a single `Playlist` export
  var Playlist = {
    liveEdgeDelay: liveEdgeDelay,
    duration: duration,
    seekable: seekable,
    getMediaInfoForTime: getMediaInfoForTime,
    isEnabled: isEnabled,
    isDisabled: isDisabled,
    isBlacklisted: isBlacklisted,
    isIncompatible: isIncompatible,
    playlistEnd: playlistEnd,
    isAes: isAes,
    hasAttribute: hasAttribute,
    estimateSegmentRequestTime: estimateSegmentRequestTime,
    isLowestEnabledRendition: isLowestEnabledRendition,
    isAudioOnly: isAudioOnly,
    playlistMatch: playlistMatch,
    segmentDurationWithParts: segmentDurationWithParts
  };
3340
  var log = videojs__default["default"].log;

  /**
   * Creates the unique id used to key playlists on the master: "<index>-<uri>".
   *
   * @param {number} index
   *        Index of the playlist within the master
   * @param {string} uri
   *        URI of the playlist
   * @return {string}
   *         The playlist id
   */
  var createPlaylistID = function createPlaylistID(index, uri) {
    return index + "-" + uri;
  };
  /**
   * Parses a given m3u8 playlist
   *
   * @param {Function} [onwarn]
   *        a function to call when the parser triggers a warning event.
   * @param {Function} [oninfo]
   *        a function to call when the parser triggers an info event.
   * @param {string} manifestString
   *        The downloaded manifest string
   * @param {Object[]} [customTagParsers]
   *        An array of custom tag parsers for the m3u8-parser instance
   * @param {Object[]} [customTagMappers]
   *        An array of custom tag mappers for the m3u8-parser instance
   * @param {boolean} [experimentalLLHLS=false]
   *        Whether to keep ll-hls features in the manifest after parsing.
   * @return {Object}
   *         The manifest object
   */

  var parseManifest = function parseManifest(_ref) {
    var onwarn = _ref.onwarn,
        oninfo = _ref.oninfo,
        manifestString = _ref.manifestString,
        _ref$customTagParsers = _ref.customTagParsers,
        customTagParsers = _ref$customTagParsers === void 0 ? [] : _ref$customTagParsers,
        _ref$customTagMappers = _ref.customTagMappers,
        customTagMappers = _ref$customTagMappers === void 0 ? [] : _ref$customTagMappers,
        experimentalLLHLS = _ref.experimentalLLHLS;
    var parser = new Parser();

    if (onwarn) {
      parser.on('warn', onwarn);
    }

    if (oninfo) {
      parser.on('info', oninfo);
    }

    // register any custom tag handling before feeding the manifest in
    customTagParsers.forEach(function (customParser) {
      return parser.addParser(customParser);
    });
    customTagMappers.forEach(function (mapper) {
      return parser.addTagMapper(mapper);
    });
    parser.push(manifestString);
    parser.end();
    var manifest = parser.manifest; // remove llhls features from the parsed manifest
    // if we don't want llhls support.

    if (!experimentalLLHLS) {
      ['preloadSegment', 'skip', 'serverControl', 'renditionReports', 'partInf', 'partTargetDuration'].forEach(function (k) {
        if (manifest.hasOwnProperty(k)) {
          delete manifest[k];
        }
      });

      if (manifest.segments) {
        // also strip part / preload-hint information from each segment
        manifest.segments.forEach(function (segment) {
          ['parts', 'preloadHints'].forEach(function (k) {
            if (segment.hasOwnProperty(k)) {
              delete segment[k];
            }
          });
        });
      }
    }

    if (!manifest.targetDuration) {
      var targetDuration = 10;

      if (manifest.segments && manifest.segments.length) {
        // fall back to the longest segment duration in the playlist
        targetDuration = manifest.segments.reduce(function (acc, s) {
          return Math.max(acc, s.duration);
        }, 0);
      }

      if (onwarn) {
        onwarn("manifest has no targetDuration defaulting to " + targetDuration);
      }

      manifest.targetDuration = targetDuration;
    }

    var parts = getLastParts(manifest);

    if (parts.length && !manifest.partTargetDuration) {
      // derive a part target duration from the longest known part
      var partTargetDuration = parts.reduce(function (acc, p) {
        return Math.max(acc, p.duration);
      }, 0);

      if (onwarn) {
        onwarn("manifest has no partTargetDuration defaulting to " + partTargetDuration);
        log.error('LL-HLS manifest has parts but lacks required #EXT-X-PART-INF:PART-TARGET value. See https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-09#section-4.4.3.7. Playback is not guaranteed.');
      }

      manifest.partTargetDuration = partTargetDuration;
    }

    return manifest;
  };
3445 /**
3446 * Loops through all supported media groups in master and calls the provided
3447 * callback for each group
3448 *
3449 * @param {Object} master
3450 * The parsed master manifest object
3451 * @param {Function} callback
3452 * Callback to call for each media group
3453 */
3454
3455 var forEachMediaGroup$1 = function forEachMediaGroup(master, callback) {
3456 if (!master.mediaGroups) {
3457 return;
3458 }
3459
3460 ['AUDIO', 'SUBTITLES'].forEach(function (mediaType) {
3461 if (!master.mediaGroups[mediaType]) {
3462 return;
3463 }
3464
3465 for (var groupKey in master.mediaGroups[mediaType]) {
3466 for (var labelKey in master.mediaGroups[mediaType][groupKey]) {
3467 var mediaProperties = master.mediaGroups[mediaType][groupKey][labelKey];
3468 callback(mediaProperties, mediaType, groupKey, labelKey);
3469 }
3470 }
3471 });
3472 };
3473 /**
3474 * Adds properties and attributes to the playlist to keep consistent functionality for
3475 * playlists throughout VHS.
3476 *
3477 * @param {Object} config
3478 * Arguments object
3479 * @param {Object} config.playlist
3480 * The media playlist
3481 * @param {string} [config.uri]
3482 * The uri to the media playlist (if media playlist is not from within a master
3483 * playlist)
3484 * @param {string} id
3485 * ID to use for the playlist
3486 */
3487
3488 var setupMediaPlaylist = function setupMediaPlaylist(_ref2) {
3489 var playlist = _ref2.playlist,
3490 uri = _ref2.uri,
3491 id = _ref2.id;
3492 playlist.id = id;
3493 playlist.playlistErrors_ = 0;
3494
3495 if (uri) {
3496 // For media playlists, m3u8-parser does not have access to a URI, as HLS media
3497 // playlists do not contain their own source URI, but one is needed for consistency in
3498 // VHS.
3499 playlist.uri = uri;
3500 } // For HLS master playlists, even though certain attributes MUST be defined, the
3501 // stream may still be played without them.
3502 // For HLS media playlists, m3u8-parser does not attach an attributes object to the
3503 // manifest.
3504 //
3505 // To avoid undefined reference errors through the project, and make the code easier
3506 // to write/read, add an empty attributes object for these cases.
3507
3508
3509 playlist.attributes = playlist.attributes || {};
3510 };
  /**
   * Adds ID, resolvedUri, and attributes properties to each playlist of the master, where
   * necessary. In addition, creates playlist IDs for each playlist and adds playlist ID to
   * playlist references to the playlists array.
   *
   * @param {Object} master
   *        The master playlist (mutated in place)
   */

  var setupMediaPlaylists = function setupMediaPlaylists(master) {
    var i = master.playlists.length;

    // iterate every playlist (order is not significant here)
    while (i--) {
      var playlist = master.playlists[i];
      setupMediaPlaylist({
        playlist: playlist,
        id: createPlaylistID(i, playlist.uri)
      });
      playlist.resolvedUri = resolveUrl(master.uri, playlist.uri);
      master.playlists[playlist.id] = playlist; // URI reference added for backwards compatibility

      master.playlists[playlist.uri] = playlist; // Although the spec states an #EXT-X-STREAM-INF tag MUST have a BANDWIDTH attribute,
      // the stream can be played without it. Although an attributes property may have been
      // added to the playlist to prevent undefined references, issue a warning to fix the
      // manifest.

      if (!playlist.attributes.BANDWIDTH) {
        log.warn('Invalid playlist STREAM-INF detected. Missing BANDWIDTH attribute.');
      }
    }
  };
  /**
   * Adds resolvedUri properties to each media group that has a uri.
   *
   * @param {Object} master
   *        The master playlist (mutated in place)
   */

  var resolveMediaGroupUris = function resolveMediaGroupUris(master) {
    forEachMediaGroup$1(master, function (properties) {
      if (properties.uri) {
        // resolve relative media group uris against the master's uri
        properties.resolvedUri = resolveUrl(master.uri, properties.uri);
      }
    });
  };
  /**
   * Creates a master playlist wrapper to insert a sole media playlist into.
   *
   * @param {Object} media
   *        Media playlist
   * @param {string} uri
   *        The media URI
   *
   * @return {Object}
   *         Master playlist
   */

  var masterForMedia = function masterForMedia(media, uri) {
    var id = createPlaylistID(0, uri);
    var master = {
      mediaGroups: {
        'AUDIO': {},
        'VIDEO': {},
        'CLOSED-CAPTIONS': {},
        'SUBTITLES': {}
      },
      // the synthetic master has no real source, so use the page url
      uri: window.location.href,
      resolvedUri: window.location.href,
      playlists: [{
        uri: uri,
        id: id,
        resolvedUri: uri,
        // m3u8-parser does not attach an attributes property to media playlists so make
        // sure that the property is attached to avoid undefined reference errors
        attributes: {}
      }]
    }; // set up ID reference

    master.playlists[id] = master.playlists[0]; // URI reference added for backwards compatibility

    master.playlists[uri] = master.playlists[0];
    return master;
  };
  /**
   * Does an in-place update of the master manifest to add updated playlist URI references
   * as well as other properties needed by VHS that aren't included by the parser.
   *
   * @param {Object} master
   *        Master manifest object (mutated in place)
   * @param {string} uri
   *        The source URI
   */

  var addPropertiesToMaster = function addPropertiesToMaster(master, uri) {
    master.uri = uri;

    for (var i = 0; i < master.playlists.length; i++) {
      if (!master.playlists[i].uri) {
        // Set up phony URIs for the playlists since playlists are referenced by their URIs
        // throughout VHS, but some formats (e.g., DASH) don't have external URIs
        // TODO: consider adding dummy URIs in mpd-parser
        var phonyUri = "placeholder-uri-" + i;
        master.playlists[i].uri = phonyUri;
      }
    }

    var audioOnlyMaster = isAudioOnly(master);
    forEachMediaGroup$1(master, function (properties, mediaType, groupKey, labelKey) {
      var groupId = "placeholder-uri-" + mediaType + "-" + groupKey + "-" + labelKey; // add a playlist array under properties

      if (!properties.playlists || !properties.playlists.length) {
        // If the manifest is audio only and this media group does not have a uri, check
        // if the media group is located in the main list of playlists. If it is, don't add
        // placeholder properties as it shouldn't be considered an alternate audio track.
        if (audioOnlyMaster && mediaType === 'AUDIO' && !properties.uri) {
          for (var _i = 0; _i < master.playlists.length; _i++) {
            var p = master.playlists[_i];

            if (p.attributes && p.attributes.AUDIO && p.attributes.AUDIO === groupKey) {
              return;
            }
          }
        }

        // seed the group with a single playlist cloned from its own properties
        properties.playlists = [_extends_1({}, properties)];
      }

      properties.playlists.forEach(function (p, i) {
        var id = createPlaylistID(i, groupId);

        if (p.uri) {
          p.resolvedUri = p.resolvedUri || resolveUrl(master.uri, p.uri);
        } else {
          // DEPRECATED, this has been added to prevent a breaking change.
          // previously we only ever had a single media group playlist, so
          // we mark the first playlist uri without prepending the index as we used to
          // ideally we would do all of the playlists the same way.
          p.uri = i === 0 ? groupId : id; // don't resolve a placeholder uri to an absolute url, just use
          // the placeholder again

          p.resolvedUri = p.uri;
        }

        p.id = p.id || id; // add an empty attributes object, all playlists are
        // expected to have this.

        p.attributes = p.attributes || {}; // setup ID and URI references (URI for backwards compatibility)

        master.playlists[p.id] = p;
        master.playlists[p.uri] = p;
      });
    });
    setupMediaPlaylists(master);
    resolveMediaGroupUris(master);
  };
3666
  // grab the video.js utilities used throughout the playlist loader
  var mergeOptions$2 = videojs__default["default"].mergeOptions,
      EventTarget$1 = videojs__default["default"].EventTarget;
3669
  /**
   * Appends the LL-HLS delivery directive query parameters (_HLS_msn,
   * _HLS_part and _HLS_skip) to a media playlist uri when the playlist is
   * live and the server advertises the corresponding serverControl features.
   *
   * @param {string} uri
   *        The media playlist uri
   * @param {Object} media
   *        The media playlist object
   * @return {string}
   *         The uri with any query directives added
   */
  var addLLHLSQueryDirectives = function addLLHLSQueryDirectives(uri, media) {
    if (media.endList || !media.serverControl) {
      return uri;
    }

    var parameters = {};

    if (media.serverControl.canBlockReload) {
      var preloadSegment = media.preloadSegment; // next msn is a zero based value, length is not.

      var nextMSN = media.mediaSequence + media.segments.length; // If preload segment has parts then it is likely
      // that we are going to request a part of that preload segment.
      // the logic below is used to determine that.

      if (preloadSegment) {
        var parts = preloadSegment.parts || []; // _HLS_part is a zero based index

        var nextPart = getKnownPartCount(media) - 1; // if nextPart is > -1 and not equal to just the
        // length of parts, then we know we had part preload hints
        // and we need to add the _HLS_part= query

        if (nextPart > -1 && nextPart !== parts.length - 1) {
          // add existing parts to our preload hints
          // eslint-disable-next-line
          parameters._HLS_part = nextPart;
        } // this if statement makes sure that we request the msn
        // of the preload segment if:
        // 1. the preload segment had parts (and was not yet a full segment)
        //    but was added to our segments array
        // 2. the preload segment had preload hints for parts that are not in
        //    the manifest yet.
        // in all other cases we want the segment after the preload segment
        // which will be given by using media.segments.length because it is 1 based
        // rather than 0 based.


        if (nextPart > -1 || parts.length) {
          nextMSN--;
        }
      } // add _HLS_msn= in front of any _HLS_part query
      // eslint-disable-next-line


      parameters._HLS_msn = nextMSN;
    }

    if (media.serverControl && media.serverControl.canSkipUntil) {
      // add _HLS_skip= in front of all other queries.
      // eslint-disable-next-line
      parameters._HLS_skip = media.serverControl.canSkipDateranges ? 'v2' : 'YES';
    }

    if (Object.keys(parameters).length) {
      var parsedUri = new window.URL(uri);
      // apply the directives in the order required by the LL-HLS spec
      ['_HLS_skip', '_HLS_msn', '_HLS_part'].forEach(function (name) {
        if (!parameters.hasOwnProperty(name)) {
          return;
        }

        parsedUri.searchParams.set(name, parameters[name]);
      });
      uri = parsedUri.toString();
    }

    return uri;
  };
  /**
   * Returns a new segment object with properties and
   * the parts array merged.
   *
   * @param {Object} a the old segment
   * @param {Object} b the new segment
   *
   * @return {Object} the merged segment
   */


  var updateSegment = function updateSegment(a, b) {
    // without an old segment there is nothing to merge
    if (!a) {
      return b;
    }

    var result = mergeOptions$2(a, b); // if only the old segment has preload hints
    // and the new one does not, remove preload hints.

    if (a.preloadHints && !b.preloadHints) {
      delete result.preloadHints;
    } // if only the old segment has parts
    // then the parts are no longer valid


    if (a.parts && !b.parts) {
      delete result.parts; // if both segments have parts
      // copy part properties from the old segment
      // to the new one.
    } else if (a.parts && b.parts) {
      for (var i = 0; i < b.parts.length; i++) {
        if (a.parts && a.parts[i]) {
          result.parts[i] = mergeOptions$2(a.parts[i], b.parts[i]);
        }
      }
    } // set skipped to false for segments that have
    // had information merged from the old segment.


    if (!a.skipped && b.skipped) {
      result.skipped = false;
    } // set preload to false for segments that have
    // had information added in the new segment.


    if (a.preload && !b.preload) {
      result.preload = false;
    }

    return result;
  };
3787 /**
3788 * Returns a new array of segments that is the result of merging
3789 * properties from an older list of segments onto an updated
3790 * list. No properties on the updated playlist will be ovewritten.
3791 *
3792 * @param {Array} original the outdated list of segments
3793 * @param {Array} update the updated list of segments
3794 * @param {number=} offset the index of the first update
3795 * segment in the original segment list. For non-live playlists,
3796 * this should always be zero and does not need to be
3797 * specified. For live playlists, it should be the difference
3798 * between the media sequence numbers in the original and updated
3799 * playlists.
3800 * @return {Array} a list of merged segment objects
3801 */
3802
3803 var updateSegments = function updateSegments(original, update, offset) {
3804 var oldSegments = original.slice();
3805 var newSegments = update.slice();
3806 offset = offset || 0;
3807 var result = [];
3808 var currentMap;
3809
3810 for (var newIndex = 0; newIndex < newSegments.length; newIndex++) {
3811 var oldSegment = oldSegments[newIndex + offset];
3812 var newSegment = newSegments[newIndex];
3813
3814 if (oldSegment) {
3815 currentMap = oldSegment.map || currentMap;
3816 result.push(updateSegment(oldSegment, newSegment));
3817 } else {
3818 // carry over map to new segment if it is missing
3819 if (currentMap && !newSegment.map) {
3820 newSegment.map = currentMap;
3821 }
3822
3823 result.push(newSegment);
3824 }
3825 }
3826
3827 return result;
3828 };
  /**
   * Resolves the uri of a segment, its key, its init section (map), the map's
   * key, its parts and its preload hints against the given base uri, storing
   * each result on the object's `resolvedUri`. Uris that are already resolved
   * are left untouched.
   *
   * @param {Object} segment
   *        The segment to resolve uris for (mutated in place)
   * @param {string} baseUri
   *        The uri to resolve relative uris against
   */
  var resolveSegmentUris = function resolveSegmentUris(segment, baseUri) {
    // preloadSegment will not have a uri at all
    // as the segment isn't actually in the manifest yet, only parts
    if (!segment.resolvedUri && segment.uri) {
      segment.resolvedUri = resolveUrl(baseUri, segment.uri);
    }

    if (segment.key && !segment.key.resolvedUri) {
      segment.key.resolvedUri = resolveUrl(baseUri, segment.key.uri);
    }

    if (segment.map && !segment.map.resolvedUri) {
      segment.map.resolvedUri = resolveUrl(baseUri, segment.map.uri);
    }

    if (segment.map && segment.map.key && !segment.map.key.resolvedUri) {
      segment.map.key.resolvedUri = resolveUrl(baseUri, segment.map.key.uri);
    }

    if (segment.parts && segment.parts.length) {
      segment.parts.forEach(function (p) {
        if (p.resolvedUri) {
          return;
        }

        p.resolvedUri = resolveUrl(baseUri, p.uri);
      });
    }

    if (segment.preloadHints && segment.preloadHints.length) {
      segment.preloadHints.forEach(function (p) {
        if (p.resolvedUri) {
          return;
        }

        p.resolvedUri = resolveUrl(baseUri, p.uri);
      });
    }
  };
3868
3869 var getAllSegments = function getAllSegments(media) {
3870 var segments = media.segments || [];
3871 var preloadSegment = media.preloadSegment; // a preloadSegment with only preloadHints is not currently
3872 // a usable segment, only include a preloadSegment that has
3873 // parts.
3874
3875 if (preloadSegment && preloadSegment.parts && preloadSegment.parts.length) {
3876 // if preloadHints has a MAP that means that the
3877 // init segment is going to change. We cannot use any of the parts
3878 // from this preload segment.
3879 if (preloadSegment.preloadHints) {
3880 for (var i = 0; i < preloadSegment.preloadHints.length; i++) {
3881 if (preloadSegment.preloadHints[i].type === 'MAP') {
3882 return segments;
3883 }
3884 }
3885 } // set the duration for our preload segment to target duration.
3886
3887
3888 preloadSegment.duration = media.targetDuration;
3889 preloadSegment.preload = true;
3890 segments.push(preloadSegment);
3891 }
3892
3893 return segments;
3894 }; // consider the playlist unchanged if the playlist object is the same or
3895 // the number of segments is equal, the media sequence number is unchanged,
3896 // and this playlist hasn't become the end of the playlist
3897
3898
3899 var isPlaylistUnchanged = function isPlaylistUnchanged(a, b) {
3900 return a === b || a.segments && b.segments && a.segments.length === b.segments.length && a.endList === b.endList && a.mediaSequence === b.mediaSequence && a.preloadSegment === b.preloadSegment;
3901 };
  /**
   * Returns a new master playlist that is the result of merging an
   * updated media playlist into the original version. If the
   * updated media playlist does not match any of the playlist
   * entries in the original master playlist, null is returned.
   *
   * @param {Object} master a parsed master M3U8 object
   * @param {Object} newMedia a parsed media M3U8 object
   * @param {Function} [unchangedCheck=isPlaylistUnchanged]
   *        predicate used to decide whether the new media playlist
   *        differs from the old one
   * @return {Object} a new object that represents the original
   * master playlist with the updated media playlist merged in, or
   * null if the merge produced no change.
   */

  var updateMaster$1 = function updateMaster(master, newMedia, unchangedCheck) {
    if (unchangedCheck === void 0) {
      unchangedCheck = isPlaylistUnchanged;
    }

    // work on a deep copy so the caller's master is untouched
    var result = mergeOptions$2(master, {});
    var oldMedia = result.playlists[newMedia.id];

    if (!oldMedia) {
      return null;
    }

    if (unchangedCheck(oldMedia, newMedia)) {
      return null;
    }

    newMedia.segments = getAllSegments(newMedia);
    var mergedPlaylist = mergeOptions$2(oldMedia, newMedia); // always use the new media's preload segment

    if (mergedPlaylist.preloadSegment && !newMedia.preloadSegment) {
      delete mergedPlaylist.preloadSegment;
    } // if the update could overlap existing segment information, merge the two segment lists


    if (oldMedia.segments) {
      if (newMedia.skip) {
        newMedia.segments = newMedia.segments || []; // add back in objects for skipped segments, so that we merge
        // old properties into the new segments

        for (var i = 0; i < newMedia.skip.skippedSegments; i++) {
          newMedia.segments.unshift({
            skipped: true
          });
        }
      }

      mergedPlaylist.segments = updateSegments(oldMedia.segments, newMedia.segments, newMedia.mediaSequence - oldMedia.mediaSequence);
    } // resolve any segment URIs to prevent us from having to do it later


    mergedPlaylist.segments.forEach(function (segment) {
      resolveSegmentUris(segment, mergedPlaylist.resolvedUri);
    }); // TODO Right now in the playlists array there are two references to each playlist, one
    // that is referenced by index, and one by URI. The index reference may no longer be
    // necessary.

    for (var _i = 0; _i < result.playlists.length; _i++) {
      if (result.playlists[_i].id === newMedia.id) {
        result.playlists[_i] = mergedPlaylist;
      }
    }

    result.playlists[newMedia.id] = mergedPlaylist; // URI reference added for backwards compatibility

    result.playlists[newMedia.uri] = mergedPlaylist; // update media group playlist references.

    forEachMediaGroup$1(master, function (properties, mediaType, groupKey, labelKey) {
      if (!properties.playlists) {
        return;
      }

      for (var _i2 = 0; _i2 < properties.playlists.length; _i2++) {
        if (newMedia.id === properties.playlists[_i2].id) {
          properties.playlists[_i2] = mergedPlaylist;
        }
      }
    });
    return result;
  };
3984 /**
3985 * Calculates the time to wait before refreshing a live playlist
3986 *
3987 * @param {Object} media
3988 * The current media
3989 * @param {boolean} update
3990 * True if there were any updates from the last refresh, false otherwise
3991 * @return {number}
3992 * The time in ms to wait before refreshing the live playlist
3993 */
3994
3995 var refreshDelay = function refreshDelay(media, update) {
3996 var segments = media.segments || [];
3997 var lastSegment = segments[segments.length - 1];
3998 var lastPart = lastSegment && lastSegment.parts && lastSegment.parts[lastSegment.parts.length - 1];
3999 var lastDuration = lastPart && lastPart.duration || lastSegment && lastSegment.duration;
4000
4001 if (update && lastDuration) {
4002 return lastDuration * 1000;
4003 } // if the playlist is unchanged since the last reload or last segment duration
4004 // cannot be determined, try again after half the target duration
4005
4006
4007 return (media.partTargetDuration || media.targetDuration || 10) * 500;
4008 };
4009 /**
4010 * Load a playlist from a remote location
4011 *
4012 * @class PlaylistLoader
4013 * @extends Stream
4014 * @param {string|Object} src url or object of manifest
4015 * @param {boolean} withCredentials the withCredentials xhr option
4016 * @class
4017 */
4018
4019 var PlaylistLoader = /*#__PURE__*/function (_EventTarget) {
4020 inheritsLoose(PlaylistLoader, _EventTarget);
4021
    /**
     * Creates a playlist loader.
     *
     * @param {string|Object} src
     *        url or manifest object for the playlist; required
     * @param {Object} vhs
     *        the owning VHS instance (provides xhr and options_)
     * @param {Object} [options]
     * @param {boolean} [options.withCredentials=false]
     *        the withCredentials xhr option
     * @param {boolean} [options.handleManifestRedirects=false]
     *        whether to follow manifest redirects when resolving uris
     * @throws {Error} when src is falsy
     */
    function PlaylistLoader(src, vhs, options) {
      var _this;

      if (options === void 0) {
        options = {};
      }

      _this = _EventTarget.call(this) || this;

      if (!src) {
        throw new Error('A non-empty playlist URL or object is required');
      }

      _this.logger_ = logger('PlaylistLoader');
      var _options = options,
          _options$withCredenti = _options.withCredentials,
          withCredentials = _options$withCredenti === void 0 ? false : _options$withCredenti,
          _options$handleManife = _options.handleManifestRedirects,
          handleManifestRedirects = _options$handleManife === void 0 ? false : _options$handleManife;
      _this.src = src;
      _this.vhs_ = vhs;
      _this.withCredentials = withCredentials;
      _this.handleManifestRedirects = handleManifestRedirects;
      var vhsOptions = vhs.options_;
      _this.customTagParsers = vhsOptions && vhsOptions.customTagParsers || [];
      _this.customTagMappers = vhsOptions && vhsOptions.customTagMappers || [];
      _this.experimentalLLHLS = vhsOptions && vhsOptions.experimentalLLHLS || false; // force experimentalLLHLS off for IE 11

      if (videojs__default["default"].browser.IE_VERSION) {
        _this.experimentalLLHLS = false;
      } // initialize the loader state


      _this.state = 'HAVE_NOTHING'; // live playlist staleness timeout

      _this.handleMediaupdatetimeout_ = _this.handleMediaupdatetimeout_.bind(assertThisInitialized(_this));

      _this.on('mediaupdatetimeout', _this.handleMediaupdatetimeout_);

      return _this;
    }
4063
    var _proto = PlaylistLoader.prototype;

    /**
     * Re-requests the currently active media playlist when the live refresh
     * timer fires. Only acts while the loader is in the HAVE_METADATA state;
     * moves the loader to HAVE_CURRENT_METADATA while the request is in flight.
     */
    _proto.handleMediaupdatetimeout_ = function handleMediaupdatetimeout_() {
      var _this2 = this;

      if (this.state !== 'HAVE_METADATA') {
        // only refresh the media playlist if no other activity is going on
        return;
      }

      var media = this.media();
      var uri = resolveUrl(this.master.uri, media.uri);

      if (this.experimentalLLHLS) {
        // append LL-HLS _HLS_msn/_HLS_part/_HLS_skip delivery directives
        uri = addLLHLSQueryDirectives(uri, media);
      }

      this.state = 'HAVE_CURRENT_METADATA';
      this.request = this.vhs_.xhr({
        uri: uri,
        withCredentials: this.withCredentials
      }, function (error, req) {
        // disposed
        if (!_this2.request) {
          return;
        }

        if (error) {
          return _this2.playlistRequestError(_this2.request, _this2.media(), 'HAVE_METADATA');
        }

        _this2.haveMetadata({
          playlistString: _this2.request.responseText,
          url: _this2.media().uri,
          id: _this2.media().id
        });
      });
    };
4102
    /**
     * Records a playlist request failure on the loader, optionally restores the
     * state the loader was in before the request, and triggers 'error'.
     *
     * @param {Object} xhr
     *        the failed request object
     * @param {Object} playlist
     *        the playlist that was being requested
     * @param {string} [startingState]
     *        loader state to restore after the failure
     */
    _proto.playlistRequestError = function playlistRequestError(xhr, playlist, startingState) {
      var uri = playlist.uri,
          id = playlist.id; // any in-flight request is now finished

      this.request = null;

      if (startingState) {
        this.state = startingState;
      }

      this.error = {
        playlist: this.master.playlists[id],
        status: xhr.status,
        message: "HLS playlist request error at URL: " + uri + ".",
        responseText: xhr.responseText,
        // status >= 500 maps to error code 4, everything else to code 2
        code: xhr.status >= 500 ? 4 : 2
      };
      this.trigger('error');
    };
4122
    /**
     * Parses a manifest string with this loader's custom tag parsers/mappers
     * and LL-HLS setting, routing parser warn/info events to the loader's
     * logger (tagged with the playlist url).
     *
     * @param {string} url
     *        url of the manifest, used only for log messages
     * @param {string} manifestString
     *        the downloaded manifest text
     * @return {Object}
     *         the parsed manifest object
     */
    _proto.parseManifest_ = function parseManifest_(_ref) {
      var _this3 = this;

      var url = _ref.url,
          manifestString = _ref.manifestString;
      return parseManifest({
        onwarn: function onwarn(_ref2) {
          var message = _ref2.message;
          return _this3.logger_("m3u8-parser warn for " + url + ": " + message);
        },
        oninfo: function oninfo(_ref3) {
          var message = _ref3.message;
          return _this3.logger_("m3u8-parser info for " + url + ": " + message);
        },
        manifestString: manifestString,
        customTagParsers: this.customTagParsers,
        customTagMappers: this.customTagMappers,
        experimentalLLHLS: this.experimentalLLHLS
      });
    }
    /**
     * Update the playlist loader's state in response to a new or updated playlist.
     *
     * @param {string} [playlistString]
     *        Playlist string (if playlistObject is not provided)
     * @param {Object} [playlistObject]
     *        Playlist object (if playlistString is not provided)
     * @param {string} url
     *        URL of playlist
     * @param {string} id
     *        ID to use for playlist
     */
    ;
4156
  _proto.haveMetadata = function haveMetadata(_ref4) {
    var playlistString = _ref4.playlistString,
        playlistObject = _ref4.playlistObject,
        url = _ref4.url,
        id = _ref4.id;
    // any in-flight request is now finished
    this.request = null;
    this.state = 'HAVE_METADATA';
    // prefer a pre-parsed playlist object; otherwise parse the playlist text
    var playlist = playlistObject || this.parseManifest_({
      url: url,
      manifestString: playlistString
    });
    playlist.lastRequest = Date.now();
    setupMediaPlaylist({
      playlist: playlist,
      uri: url,
      id: id
    }); // merge this playlist into the master

    var update = updateMaster$1(this.master, playlist);
    this.targetDuration = playlist.partTargetDuration || playlist.targetDuration;
    this.pendingMedia_ = null;

    if (update) {
      this.master = update;
      this.media_ = this.master.playlists[id];
    } else {
      // merge produced no change since the last refresh
      this.trigger('playlistunchanged');
    }

    // schedule the next refresh (updateMediaUpdateTimeout_ is a no-op for
    // playlists with an endList)
    this.updateMediaUpdateTimeout_(refreshDelay(this.media(), !!update));
    this.trigger('loadedplaylist');
  }
4190 /**
4191 * Abort any outstanding work and clean up.
4192 */
4193 ;
4194
  _proto.dispose = function dispose() {
    // notify listeners before teardown so they can release their own state
    this.trigger('dispose');
    this.stopRequest();
    // cancel the live-refresh timer and any delayed rendition switch
    window.clearTimeout(this.mediaUpdateTimeout);
    window.clearTimeout(this.finalRenditionTimeout);
    // remove all event listeners
    this.off();
  };
4202
4203 _proto.stopRequest = function stopRequest() {
4204 if (this.request) {
4205 var oldRequest = this.request;
4206 this.request = null;
4207 oldRequest.onreadystatechange = null;
4208 oldRequest.abort();
4209 }
4210 }
4211 /**
4212 * When called without any arguments, returns the currently
4213 * active media playlist. When called with a single argument,
4214 * triggers the playlist loader to asynchronously switch to the
4215 * specified media playlist. Calling this method while the
4216 * loader is in the HAVE_NOTHING causes an error to be emitted
4217 * but otherwise has no effect.
4218 *
4219 * @param {Object=} playlist the parsed media playlist
4220 * object to switch to
4221 * @param {boolean=} shouldDelay whether we should delay the request by half target duration
4222 *
4223 * @return {Playlist} the current loaded media
4224 */
4225 ;
4226
  _proto.media = function media(playlist, shouldDelay) {
    var _this4 = this;

    // getter
    if (!playlist) {
      return this.media_;
    } // setter


    if (this.state === 'HAVE_NOTHING') {
      throw new Error('Cannot switch media playlist from ' + this.state);
    } // find the playlist object if the target playlist has been
    // specified by URI


    if (typeof playlist === 'string') {
      if (!this.master.playlists[playlist]) {
        throw new Error('Unknown playlist URI: ' + playlist);
      }

      playlist = this.master.playlists[playlist];
    }

    window.clearTimeout(this.finalRenditionTimeout);

    if (shouldDelay) {
      // delay by half the (part) target duration, or 5s when neither is set
      var delay = (playlist.partTargetDuration || playlist.targetDuration) / 2 * 1000 || 5 * 1000;
      this.finalRenditionTimeout = window.setTimeout(this.media.bind(this, playlist, false), delay);
      return;
    }

    // remember the current state so a failed request can restore it
    var startingState = this.state;
    var mediaChange = !this.media_ || playlist.id !== this.media_.id;
    var masterPlaylistRef = this.master.playlists[playlist.id]; // switch to fully loaded playlists immediately

    if (masterPlaylistRef && masterPlaylistRef.endList || // handle the case of a playlist object (e.g., if using vhs-json with a resolved
    // media playlist or, for the case of demuxed audio, a resolved audio media group)
    playlist.endList && playlist.segments.length) {
      // abort outstanding playlist requests
      if (this.request) {
        this.request.onreadystatechange = null;
        this.request.abort();
        this.request = null;
      }

      this.state = 'HAVE_METADATA';
      this.media_ = playlist; // trigger media change if the active media has been updated

      if (mediaChange) {
        this.trigger('mediachanging');

        if (startingState === 'HAVE_MASTER') {
          // The initial playlist was a master manifest, and the first media selected was
          // also provided (in the form of a resolved playlist object) as part of the
          // source object (rather than just a URL). Therefore, since the media playlist
          // doesn't need to be requested, loadedmetadata won't trigger as part of the
          // normal flow, and needs an explicit trigger here.
          this.trigger('loadedmetadata');
        } else {
          this.trigger('mediachange');
        }
      }

      return;
    } // We update/set the timeout here so that live playlists
    // that are not a media change will "start" the loader as expected.
    // We expect that this function will start the media update timeout
    // cycle again. This also prevents a playlist switch failure from
    // causing us to stall during live.


    this.updateMediaUpdateTimeout_(refreshDelay(playlist, true)); // switching to the active playlist is a no-op

    if (!mediaChange) {
      return;
    }

    this.state = 'SWITCHING_MEDIA'; // there is already an outstanding playlist request

    if (this.request) {
      if (playlist.resolvedUri === this.request.url) {
        // requesting to switch to the same playlist multiple times
        // has no effect after the first
        return;
      }

      this.request.onreadystatechange = null;
      this.request.abort();
      this.request = null;
    } // request the new playlist


    if (this.media_) {
      this.trigger('mediachanging');
    }

    this.pendingMedia_ = playlist;
    this.request = this.vhs_.xhr({
      uri: playlist.resolvedUri,
      withCredentials: this.withCredentials
    }, function (error, req) {
      // disposed
      if (!_this4.request) {
        return;
      }

      playlist.lastRequest = Date.now();
      playlist.resolvedUri = resolveManifestRedirect(_this4.handleManifestRedirects, playlist.resolvedUri, req);

      if (error) {
        // restores `startingState` and triggers 'error'
        return _this4.playlistRequestError(_this4.request, playlist, startingState);
      }

      _this4.haveMetadata({
        playlistString: req.responseText,
        url: playlist.uri,
        id: playlist.id
      }); // fire loadedmetadata the first time a media playlist is loaded


      if (startingState === 'HAVE_MASTER') {
        _this4.trigger('loadedmetadata');
      } else {
        _this4.trigger('mediachange');
      }
    });
  }
4354 /**
4355 * pause loading of the playlist
4356 */
4357 ;
4358
4359 _proto.pause = function pause() {
4360 if (this.mediaUpdateTimeout) {
4361 window.clearTimeout(this.mediaUpdateTimeout);
4362 this.mediaUpdateTimeout = null;
4363 }
4364
4365 this.stopRequest();
4366
4367 if (this.state === 'HAVE_NOTHING') {
4368 // If we pause the loader before any data has been retrieved, its as if we never
4369 // started, so reset to an unstarted state.
4370 this.started = false;
4371 } // Need to restore state now that no activity is happening
4372
4373
4374 if (this.state === 'SWITCHING_MEDIA') {
4375 // if the loader was in the process of switching media, it should either return to
4376 // HAVE_MASTER or HAVE_METADATA depending on if the loader has loaded a media
4377 // playlist yet. This is determined by the existence of loader.media_
4378 if (this.media_) {
4379 this.state = 'HAVE_METADATA';
4380 } else {
4381 this.state = 'HAVE_MASTER';
4382 }
4383 } else if (this.state === 'HAVE_CURRENT_METADATA') {
4384 this.state = 'HAVE_METADATA';
4385 }
4386 }
4387 /**
4388 * start loading of the playlist
4389 */
4390 ;
4391
4392 _proto.load = function load(shouldDelay) {
4393 var _this5 = this;
4394
4395 if (this.mediaUpdateTimeout) {
4396 window.clearTimeout(this.mediaUpdateTimeout);
4397 this.mediaUpdateTimeout = null;
4398 }
4399
4400 var media = this.media();
4401
4402 if (shouldDelay) {
4403 var delay = media ? (media.partTargetDuration || media.targetDuration) / 2 * 1000 : 5 * 1000;
4404 this.mediaUpdateTimeout = window.setTimeout(function () {
4405 _this5.mediaUpdateTimeout = null;
4406
4407 _this5.load();
4408 }, delay);
4409 return;
4410 }
4411
4412 if (!this.started) {
4413 this.start();
4414 return;
4415 }
4416
4417 if (media && !media.endList) {
4418 this.trigger('mediaupdatetimeout');
4419 } else {
4420 this.trigger('loadedplaylist');
4421 }
4422 };
4423
4424 _proto.updateMediaUpdateTimeout_ = function updateMediaUpdateTimeout_(delay) {
4425 var _this6 = this;
4426
4427 if (this.mediaUpdateTimeout) {
4428 window.clearTimeout(this.mediaUpdateTimeout);
4429 this.mediaUpdateTimeout = null;
4430 } // we only have use mediaupdatetimeout for live playlists.
4431
4432
4433 if (!this.media() || this.media().endList) {
4434 return;
4435 }
4436
4437 this.mediaUpdateTimeout = window.setTimeout(function () {
4438 _this6.mediaUpdateTimeout = null;
4439
4440 _this6.trigger('mediaupdatetimeout');
4441
4442 _this6.updateMediaUpdateTimeout_(delay);
4443 }, delay);
4444 }
4445 /**
4446 * start loading of the playlist
4447 */
4448 ;
4449
  _proto.start = function start() {
    var _this7 = this;

    this.started = true;

    if (typeof this.src === 'object') {
      // in the case of an entirely constructed manifest object (meaning there's no actual
      // manifest on a server), default the uri to the page's href
      if (!this.src.uri) {
        this.src.uri = window.location.href;
      } // resolvedUri is added on internally after the initial request. Since there's no
      // request for pre-resolved manifests, add on resolvedUri here.


      this.src.resolvedUri = this.src.uri; // Since a manifest object was passed in as the source (instead of a URL), the first
      // request can be skipped (since the top level of the manifest, at a minimum, is
      // already available as a parsed manifest object). However, if the manifest object
      // represents a master playlist, some media playlists may need to be resolved before
      // the starting segment list is available. Therefore, go directly to setup of the
      // initial playlist, and let the normal flow continue from there.
      //
      // Note that the call to setup is asynchronous, as other sections of VHS may assume
      // that the first request is asynchronous.

      setTimeout(function () {
        _this7.setupInitialPlaylist(_this7.src);
      }, 0);
      return;
    } // request the specified URL


    this.request = this.vhs_.xhr({
      uri: this.src,
      withCredentials: this.withCredentials
    }, function (error, req) {
      // disposed
      if (!_this7.request) {
        return;
      } // clear the loader's request reference


      _this7.request = null;

      if (error) {
        _this7.error = {
          status: req.status,
          message: "HLS playlist request error at URL: " + _this7.src + ".",
          responseText: req.responseText,
          // MEDIA_ERR_NETWORK
          code: 2
        };

        // reset to unstarted so a later load() retries from scratch
        if (_this7.state === 'HAVE_NOTHING') {
          _this7.started = false;
        }

        return _this7.trigger('error');
      }

      // follow any redirect so future relative URI resolution uses it
      _this7.src = resolveManifestRedirect(_this7.handleManifestRedirects, _this7.src, req);

      var manifest = _this7.parseManifest_({
        manifestString: req.responseText,
        url: _this7.src
      });

      _this7.setupInitialPlaylist(manifest);
    });
  };
4519
4520 _proto.srcUri = function srcUri() {
4521 return typeof this.src === 'string' ? this.src : this.src.uri;
4522 }
4523 /**
4524 * Given a manifest object that's either a master or media playlist, trigger the proper
4525 * events and set the state of the playlist loader.
4526 *
4527 * If the manifest object represents a master playlist, `loadedplaylist` will be
4528 * triggered to allow listeners to select a playlist. If none is selected, the loader
4529 * will default to the first one in the playlists array.
4530 *
4531 * If the manifest object represents a media playlist, `loadedplaylist` will be
4532 * triggered followed by `loadedmetadata`, as the only available playlist is loaded.
4533 *
4534 * In the case of a media playlist, a master playlist object wrapper with one playlist
4535 * will be created so that all logic can handle playlists in the same fashion (as an
4536 * assumed manifest object schema).
4537 *
4538 * @param {Object} manifest
4539 * The parsed manifest object
4540 */
4541 ;
4542
  _proto.setupInitialPlaylist = function setupInitialPlaylist(manifest) {
    this.state = 'HAVE_MASTER';

    // the presence of a playlists array marks this as a master manifest
    if (manifest.playlists) {
      this.master = manifest;
      addPropertiesToMaster(this.master, this.srcUri()); // If the initial master playlist has playlists with segments already resolved,
      // then resolve URIs in advance, as they are usually done after a playlist request,
      // which may not happen if the playlist is resolved.

      manifest.playlists.forEach(function (playlist) {
        playlist.segments = getAllSegments(playlist);
        playlist.segments.forEach(function (segment) {
          resolveSegmentUris(segment, playlist.resolvedUri);
        });
      });
      this.trigger('loadedplaylist');

      if (!this.request) {
        // no media playlist was specifically selected so start
        // from the first listed one
        this.media(this.master.playlists[0]);
      }

      return;
    } // In order to support media playlists passed in as vhs-json, the case where the uri
    // is not provided as part of the manifest should be considered, and an appropriate
    // default used.


    var uri = this.srcUri() || window.location.href;
    // wrap the lone media playlist in a synthetic master so all downstream
    // logic can treat playlists uniformly
    this.master = masterForMedia(manifest, uri);
    this.haveMetadata({
      playlistObject: manifest,
      url: uri,
      id: this.master.playlists[0].id
    });
    this.trigger('loadedmetadata');
  };
4581
4582 return PlaylistLoader;
4583 }(EventTarget$1);
4584
4585 /**
4586 * @file xhr.js
4587 */
4588 var videojsXHR = videojs__default["default"].xhr,
4589 mergeOptions$1 = videojs__default["default"].mergeOptions;
4590
4591 var callbackWrapper = function callbackWrapper(request, error, response, callback) {
4592 var reqResponse = request.responseType === 'arraybuffer' ? request.response : request.responseText;
4593
4594 if (!error && reqResponse) {
4595 request.responseTime = Date.now();
4596 request.roundTripTime = request.responseTime - request.requestTime;
4597 request.bytesReceived = reqResponse.byteLength || reqResponse.length;
4598
4599 if (!request.bandwidth) {
4600 request.bandwidth = Math.floor(request.bytesReceived / request.roundTripTime * 8 * 1000);
4601 }
4602 }
4603
4604 if (response.headers) {
4605 request.responseHeaders = response.headers;
4606 } // videojs.xhr now uses a specific code on the error
4607 // object to signal that a request has timed out instead
4608 // of setting a boolean on the request object
4609
4610
4611 if (error && error.code === 'ETIMEDOUT') {
4612 request.timedout = true;
4613 } // videojs.xhr no longer considers status codes outside of 200 and 0
4614 // (for file uris) to be errors, but the old XHR did, so emulate that
4615 // behavior. Status 206 may be used in response to byterange requests.
4616
4617
4618 if (!error && !request.aborted && response.statusCode !== 200 && response.statusCode !== 206 && response.statusCode !== 0) {
4619 error = new Error('XHR Failed with a response of: ' + (request && (reqResponse || request.responseText)));
4620 }
4621
4622 callback(error, request);
4623 };
4624
  // Builds the xhr wrapper used by VHS: applies a default timeout, runs an
  // optional user-supplied `beforeRequest` hook, delegates to videojs.xhr (or
  // an override installed at videojs.Vhs.xhr), and records request timing.
  var xhrFactory = function xhrFactory() {
    var xhr = function XhrFunction(options, callback) {
      // Add a default timeout
      options = mergeOptions$1({
        timeout: 45e3
      }, options); // Allow an optional user-specified function to modify the option
      // object before we construct the xhr request

      var beforeRequest = XhrFunction.beforeRequest || videojs__default["default"].Vhs.xhr.beforeRequest;

      if (beforeRequest && typeof beforeRequest === 'function') {
        var newOptions = beforeRequest(options);

        if (newOptions) {
          options = newOptions;
        }
      } // Use the standard videojs.xhr() method unless `videojs.Vhs.xhr` has been overriden
      // TODO: switch back to videojs.Vhs.xhr.name === 'XhrFunction' when we drop IE11


      var xhrMethod = videojs__default["default"].Vhs.xhr.original === true ? videojsXHR : videojs__default["default"].Vhs.xhr;
      var request = xhrMethod(options, function (error, response) {
        return callbackWrapper(request, error, response, callback);
      });
      var originalAbort = request.abort;

      request.abort = function () {
        // mark aborted so callbackWrapper skips the non-200 error emulation
        request.aborted = true;
        return originalAbort.apply(request, arguments);
      };

      request.uri = options.uri;
      // requestTime is paired with responseTime in callbackWrapper to
      // compute round-trip time and bandwidth
      request.requestTime = Date.now();
      return request;
    };

    // marks this as the unwrapped default; the check above uses this flag to
    // detect overrides
    xhr.original = true;
    return xhr;
  };
4664 /**
4665 * Turns segment byterange into a string suitable for use in
4666 * HTTP Range requests
4667 *
4668 * @param {Object} byterange - an object with two values defining the start and end
4669 * of a byte-range
4670 */
4671
4672
4673 var byterangeStr = function byterangeStr(byterange) {
4674 // `byterangeEnd` is one less than `offset + length` because the HTTP range
4675 // header uses inclusive ranges
4676 var byterangeEnd;
4677 var byterangeStart = byterange.offset;
4678
4679 if (typeof byterange.offset === 'bigint' || typeof byterange.length === 'bigint') {
4680 byterangeEnd = window.BigInt(byterange.offset) + window.BigInt(byterange.length) - window.BigInt(1);
4681 } else {
4682 byterangeEnd = byterange.offset + byterange.length - 1;
4683 }
4684
4685 return 'bytes=' + byterangeStart + '-' + byterangeEnd;
4686 };
4687 /**
4688 * Defines headers for use in the xhr request for a particular segment.
4689 *
4690 * @param {Object} segment - a simplified copy of the segmentInfo object
4691 * from SegmentLoader
4692 */
4693
4694 var segmentXhrHeaders = function segmentXhrHeaders(segment) {
4695 var headers = {};
4696
4697 if (segment.byterange) {
4698 headers.Range = byterangeStr(segment.byterange);
4699 }
4700
4701 return headers;
4702 };
4703
4704 var MPEGURL_REGEX = /^(audio|video|application)\/(x-|vnd\.apple\.)?mpegurl/i;
4705 var DASH_REGEX = /^application\/dash\+xml/i;
4706 /**
4707 * Returns a string that describes the type of source based on a video source object's
4708 * media type.
4709 *
4710 * @see {@link https://dev.w3.org/html5/pf-summary/video.html#dom-source-type|Source Type}
4711 *
4712 * @param {string} type
4713 * Video source object media type
4714 * @return {('hls'|'dash'|'vhs-json'|null)}
4715 * VHS source type string
4716 */
4717
4718 var simpleTypeFromSourceType = function simpleTypeFromSourceType(type) {
4719 if (MPEGURL_REGEX.test(type)) {
4720 return 'hls';
4721 }
4722
4723 if (DASH_REGEX.test(type)) {
4724 return 'dash';
4725 } // Denotes the special case of a manifest object passed to http-streaming instead of a
4726 // source URL.
4727 //
4728 // See https://en.wikipedia.org/wiki/Media_type for details on specifying media types.
4729 //
4730 // In this case, vnd stands for vendor, video.js for the organization, VHS for this
4731 // project, and the +json suffix identifies the structure of the media type.
4732
4733
4734 if (type === 'application/vnd.videojs.vhs+json') {
4735 return 'vhs-json';
4736 }
4737
4738 return null;
4739 };
4740
4741 // const log2 = Math.log2 ? Math.log2 : (x) => (Math.log(x) / Math.log(2));
4742 // we used to do this with log2 but BigInt does not support builtin math
4743 // Math.ceil(log2(x));
4744
4745
4746 var countBits = function countBits(x) {
4747 return x.toString(2).length;
4748 }; // count the number of whole bytes it would take to represent a number
4749
4750 var countBytes = function countBytes(x) {
4751 return Math.ceil(countBits(x) / 8);
4752 };
4753 var isArrayBufferView = function isArrayBufferView(obj) {
4754 if (ArrayBuffer.isView === 'function') {
4755 return ArrayBuffer.isView(obj);
4756 }
4757
4758 return obj && obj.buffer instanceof ArrayBuffer;
4759 };
4760 var isTypedArray = function isTypedArray(obj) {
4761 return isArrayBufferView(obj);
4762 };
4763 var toUint8 = function toUint8(bytes) {
4764 if (bytes instanceof Uint8Array) {
4765 return bytes;
4766 }
4767
4768 if (!Array.isArray(bytes) && !isTypedArray(bytes) && !(bytes instanceof ArrayBuffer)) {
4769 // any non-number or NaN leads to empty uint8array
4770 // eslint-disable-next-line
4771 if (typeof bytes !== 'number' || typeof bytes === 'number' && bytes !== bytes) {
4772 bytes = 0;
4773 } else {
4774 bytes = [bytes];
4775 }
4776 }
4777
4778 return new Uint8Array(bytes && bytes.buffer || bytes, bytes && bytes.byteOffset || 0, bytes && bytes.byteLength || 0);
4779 };
  // Prefer native BigInt so byte math stays exact above 2^53; Number is a
  // lossy fallback for older environments. NOTE(review): this reads
  // `window`, so a browser-like global is assumed — confirm for Node use.
  var BigInt = window.BigInt || Number;
  // powers of 256 used by bytesToNumber/numberToBytes below
  var BYTE_TABLE = [BigInt('0x1'), BigInt('0x100'), BigInt('0x10000'), BigInt('0x1000000'), BigInt('0x100000000'), BigInt('0x10000000000'), BigInt('0x1000000000000'), BigInt('0x100000000000000'), BigInt('0x10000000000000000')];
  // Endianness probe; its return value is unused in this chunk.
  (function () {
    var a = new Uint16Array([0xFFCC]);
    var b = new Uint8Array(a.buffer, a.byteOffset, a.byteLength);

    if (b[0] === 0xFF) {
      return 'big';
    }

    if (b[0] === 0xCC) {
      return 'little';
    }

    return 'unknown';
  })();
  // Convert a byte sequence to a number. `le` selects little-endian input;
  // `signed` interprets the value as two's complement. Arithmetic is done
  // with BigInt (when available) and converted back to Number at the end.
  var bytesToNumber = function bytesToNumber(bytes, _temp) {
    var _ref = _temp === void 0 ? {} : _temp,
        _ref$signed = _ref.signed,
        signed = _ref$signed === void 0 ? false : _ref$signed,
        _ref$le = _ref.le,
        le = _ref$le === void 0 ? false : _ref$le;

    bytes = toUint8(bytes);
    // fold from the least-significant byte: left-to-right for LE,
    // right-to-left for BE
    var fn = le ? 'reduce' : 'reduceRight';
    // ie 11 does not support reduce/reduceRight on typed arrays
    var obj = bytes[fn] ? bytes[fn] : Array.prototype[fn];
    var number = obj.call(bytes, function (total, byte, i) {
      var exponent = le ? i : Math.abs(i + 1 - bytes.length);
      return total + BigInt(byte) * BYTE_TABLE[exponent];
    }, BigInt(0));

    if (signed) {
      // values above the signed maximum wrap around to negative
      var max = BYTE_TABLE[bytes.length] / BigInt(2) - BigInt(1);
      number = BigInt(number);

      if (number > max) {
        number -= max;
        number -= max;
        number -= BigInt(2);
      }
    }

    return Number(number);
  };
  // Convert a number (or BigInt) into its byte representation; `le` selects
  // little-endian output. Non-numeric or NaN input yields the bytes for 0.
  var numberToBytes = function numberToBytes(number, _temp2) {
    var _ref2 = _temp2 === void 0 ? {} : _temp2,
        _ref2$le = _ref2.le,
        le = _ref2$le === void 0 ? false : _ref2$le; // eslint-disable-next-line


    if (typeof number !== 'bigint' && typeof number !== 'number' || typeof number === 'number' && number !== number) {
      number = 0;
    }

    number = BigInt(number);
    var byteCount = countBytes(number);
    var bytes = new Uint8Array(new ArrayBuffer(byteCount));

    for (var i = 0; i < byteCount; i++) {
      // place byte i at the front (LE) or the back (BE) of the buffer
      var byteIndex = le ? i : Math.abs(i + 1 - bytes.length);
      bytes[byteIndex] = Number(number / BYTE_TABLE[i] & BigInt(0xFF));

      if (number < 0) {
        // NOTE(review): adjusts negative values toward a two's-complement
        // style encoding — confirm against vhs-utils byte-helper tests
        bytes[byteIndex] = Math.abs(~bytes[byteIndex]);
        bytes[byteIndex] -= i === 0 ? 1 : 2;
      }
    }

    return bytes;
  };
4850 var stringToBytes = function stringToBytes(string, stringIsBytes) {
4851 if (typeof string !== 'string' && string && typeof string.toString === 'function') {
4852 string = string.toString();
4853 }
4854
4855 if (typeof string !== 'string') {
4856 return new Uint8Array();
4857 } // If the string already is bytes, we don't have to do this
4858 // otherwise we do this so that we split multi length characters
4859 // into individual bytes
4860
4861
4862 if (!stringIsBytes) {
4863 string = unescape(encodeURIComponent(string));
4864 }
4865
4866 var view = new Uint8Array(string.length);
4867
4868 for (var i = 0; i < string.length; i++) {
4869 view[i] = string.charCodeAt(i);
4870 }
4871
4872 return view;
4873 };
4874 var concatTypedArrays = function concatTypedArrays() {
4875 for (var _len = arguments.length, buffers = new Array(_len), _key = 0; _key < _len; _key++) {
4876 buffers[_key] = arguments[_key];
4877 }
4878
4879 buffers = buffers.filter(function (b) {
4880 return b && (b.byteLength || b.length) && typeof b !== 'string';
4881 });
4882
4883 if (buffers.length <= 1) {
4884 // for 0 length we will return empty uint8
4885 // for 1 length we return the first uint8
4886 return toUint8(buffers[0]);
4887 }
4888
4889 var totalLen = buffers.reduce(function (total, buf, i) {
4890 return total + (buf.byteLength || buf.length);
4891 }, 0);
4892 var tempBuffer = new Uint8Array(totalLen);
4893 var offset = 0;
4894 buffers.forEach(function (buf) {
4895 buf = toUint8(buf);
4896 tempBuffer.set(buf, offset);
4897 offset += buf.byteLength;
4898 });
4899 return tempBuffer;
4900 };
4901 /**
4902 * Check if the bytes "b" are contained within bytes "a".
4903 *
4904 * @param {Uint8Array|Array} a
4905 * Bytes to check in
4906 *
4907 * @param {Uint8Array|Array} b
4908 * Bytes to check for
4909 *
4910 * @param {Object} options
4911 * options
4912 *
4913 * @param {Array|Uint8Array} [offset=0]
4914 * offset to use when looking at bytes in a
4915 *
4916 * @param {Array|Uint8Array} [mask=[]]
4917 * mask to use on bytes before comparison.
4918 *
4919 * @return {boolean}
4920 * If all bytes in b are inside of a, taking into account
4921 * bit masks.
4922 */
4923
4924 var bytesMatch = function bytesMatch(a, b, _temp3) {
4925 var _ref3 = _temp3 === void 0 ? {} : _temp3,
4926 _ref3$offset = _ref3.offset,
4927 offset = _ref3$offset === void 0 ? 0 : _ref3$offset,
4928 _ref3$mask = _ref3.mask,
4929 mask = _ref3$mask === void 0 ? [] : _ref3$mask;
4930
4931 a = toUint8(a);
4932 b = toUint8(b); // ie 11 does not support uint8 every
4933
4934 var fn = b.every ? b.every : Array.prototype.every;
4935 return b.length && a.length - offset >= b.length && // ie 11 doesn't support every on uin8
4936 fn.call(b, function (bByte, i) {
4937 var aByte = mask[i] ? mask[i] & a[offset + i] : a[offset + i];
4938 return bByte === aByte;
4939 });
4940 };
4941
4942 /**
4943 * @file bin-utils.js
4944 */
4945
4946 /**
4947 * convert a TimeRange to text
4948 *
4949 * @param {TimeRange} range the timerange to use for conversion
4950 * @param {number} i the iterator on the range to convert
4951 * @return {string} the range in string format
4952 */
4953
4954 var textRange = function textRange(range, i) {
4955 return range.start(i) + '-' + range.end(i);
4956 };
4957 /**
4958 * format a number as hex string
4959 *
4960 * @param {number} e The number
4961 * @param {number} i the iterator
4962 * @return {string} the hex formatted number as a string
4963 */
4964
4965
4966 var formatHexString = function formatHexString(e, i) {
4967 var value = e.toString(16);
4968 return '00'.substring(0, 2 - value.length) + value + (i % 2 ? ' ' : '');
4969 };
4970
4971 var formatAsciiString = function formatAsciiString(e) {
4972 if (e >= 0x20 && e < 0x7e) {
4973 return String.fromCharCode(e);
4974 }
4975
4976 return '.';
4977 };
4978 /**
4979 * Creates an object for sending to a web worker modifying properties that are TypedArrays
4980 * into a new object with seperated properties for the buffer, byteOffset, and byteLength.
4981 *
4982 * @param {Object} message
4983 * Object of properties and values to send to the web worker
4984 * @return {Object}
4985 * Modified message with TypedArray values expanded
4986 * @function createTransferableMessage
4987 */
4988
4989
4990 var createTransferableMessage = function createTransferableMessage(message) {
4991 var transferable = {};
4992 Object.keys(message).forEach(function (key) {
4993 var value = message[key];
4994
4995 if (isArrayBufferView(value)) {
4996 transferable[key] = {
4997 bytes: value.buffer,
4998 byteOffset: value.byteOffset,
4999 byteLength: value.byteLength
5000 };
5001 } else {
5002 transferable[key] = value;
5003 }
5004 });
5005 return transferable;
5006 };
5007 /**
5008 * Returns a unique string identifier for a media initialization
5009 * segment.
5010 *
5011 * @param {Object} initSegment
5012 * the init segment object.
5013 *
5014 * @return {string} the generated init segment id
5015 */
5016
5017 var initSegmentId = function initSegmentId(initSegment) {
5018 var byterange = initSegment.byterange || {
5019 length: Infinity,
5020 offset: 0
5021 };
5022 return [byterange.length, byterange.offset, initSegment.resolvedUri].join(',');
5023 };
5024 /**
5025 * Returns a unique string identifier for a media segment key.
5026 *
5027 * @param {Object} key the encryption key
5028 * @return {string} the unique id for the media segment key.
5029 */
5030
5031 var segmentKeyId = function segmentKeyId(key) {
5032 return key.resolvedUri;
5033 };
5034 /**
5035 * utils to help dump binary data to the console
5036 *
5037 * @param {Array|TypedArray} data
5038 * data to dump to a string
5039 *
5040 * @return {string} the data as a hex string.
5041 */
5042
5043 var hexDump = function hexDump(data) {
5044 var bytes = Array.prototype.slice.call(data);
5045 var step = 16;
5046 var result = '';
5047 var hex;
5048 var ascii;
5049
5050 for (var j = 0; j < bytes.length / step; j++) {
5051 hex = bytes.slice(j * step, j * step + step).map(formatHexString).join('');
5052 ascii = bytes.slice(j * step, j * step + step).map(formatAsciiString).join('');
5053 result += hex + ' ' + ascii + '\n';
5054 }
5055
5056 return result;
5057 };
5058 var tagDump = function tagDump(_ref) {
5059 var bytes = _ref.bytes;
5060 return hexDump(bytes);
5061 };
5062 var textRanges = function textRanges(ranges) {
5063 var result = '';
5064 var i;
5065
5066 for (i = 0; i < ranges.length; i++) {
5067 result += textRange(ranges, i) + ' ';
5068 }
5069
5070 return result;
5071 };
5072
  // Debug/tooling helpers re-exported as a frozen namespace object.
  var utils = /*#__PURE__*/Object.freeze({
    __proto__: null,
    createTransferableMessage: createTransferableMessage,
    initSegmentId: initSegmentId,
    segmentKeyId: segmentKeyId,
    hexDump: hexDump,
    tagDump: tagDump,
    textRanges: textRanges
  });
5082
  // TODO handle fmp4 case where the timing info is accurate and doesn't involve transmux
  // 25% was arbitrarily chosen, and may need to be refined over time.
  // Used below to pad the last segment's duration when estimating the end of
  // the stream for program-time lookups.

  var SEGMENT_END_FUDGE_PERCENT = 0.25;
5087 /**
5088 * Converts a player time (any time that can be gotten/set from player.currentTime(),
5089 * e.g., any time within player.seekable().start(0) to player.seekable().end(0)) to a
5090 * program time (any time referencing the real world (e.g., EXT-X-PROGRAM-DATE-TIME)).
5091 *
5092 * The containing segment is required as the EXT-X-PROGRAM-DATE-TIME serves as an "anchor
5093 * point" (a point where we have a mapping from program time to player time, with player
5094 * time being the post transmux start of the segment).
5095 *
5096 * For more details, see [this doc](../../docs/program-time-from-player-time.md).
5097 *
5098 * @param {number} playerTime the player time
5099 * @param {Object} segment the segment which contains the player time
5100 * @return {Date} program time
5101 */
5102
5103 var playerTimeToProgramTime = function playerTimeToProgramTime(playerTime, segment) {
5104 if (!segment.dateTimeObject) {
5105 // Can't convert without an "anchor point" for the program time (i.e., a time that can
5106 // be used to map the start of a segment with a real world time).
5107 return null;
5108 }
5109
5110 var transmuxerPrependedSeconds = segment.videoTimingInfo.transmuxerPrependedSeconds;
5111 var transmuxedStart = segment.videoTimingInfo.transmuxedPresentationStart; // get the start of the content from before old content is prepended
5112
5113 var startOfSegment = transmuxedStart + transmuxerPrependedSeconds;
5114 var offsetFromSegmentStart = playerTime - startOfSegment;
5115 return new Date(segment.dateTimeObject.getTime() + offsetFromSegmentStart * 1000);
5116 };
5117 var originalSegmentVideoDuration = function originalSegmentVideoDuration(videoTimingInfo) {
5118 return videoTimingInfo.transmuxedPresentationEnd - videoTimingInfo.transmuxedPresentationStart - videoTimingInfo.transmuxerPrependedSeconds;
5119 };
5120 /**
5121 * Finds a segment that contains the time requested given as an ISO-8601 string. The
5122 * returned segment might be an estimate or an accurate match.
5123 *
5124 * @param {string} programTime The ISO-8601 programTime to find a match for
5125 * @param {Object} playlist A playlist object to search within
5126 */
5127
5128 var findSegmentForProgramTime = function findSegmentForProgramTime(programTime, playlist) {
5129 // Assumptions:
5130 // - verifyProgramDateTimeTags has already been run
5131 // - live streams have been started
5132 var dateTimeObject;
5133
5134 try {
5135 dateTimeObject = new Date(programTime);
5136 } catch (e) {
5137 return null;
5138 }
5139
5140 if (!playlist || !playlist.segments || playlist.segments.length === 0) {
5141 return null;
5142 }
5143
5144 var segment = playlist.segments[0];
5145
5146 if (dateTimeObject < segment.dateTimeObject) {
5147 // Requested time is before stream start.
5148 return null;
5149 }
5150
5151 for (var i = 0; i < playlist.segments.length - 1; i++) {
5152 segment = playlist.segments[i];
5153 var nextSegmentStart = playlist.segments[i + 1].dateTimeObject;
5154
5155 if (dateTimeObject < nextSegmentStart) {
5156 break;
5157 }
5158 }
5159
5160 var lastSegment = playlist.segments[playlist.segments.length - 1];
5161 var lastSegmentStart = lastSegment.dateTimeObject;
5162 var lastSegmentDuration = lastSegment.videoTimingInfo ? originalSegmentVideoDuration(lastSegment.videoTimingInfo) : lastSegment.duration + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT;
5163 var lastSegmentEnd = new Date(lastSegmentStart.getTime() + lastSegmentDuration * 1000);
5164
5165 if (dateTimeObject > lastSegmentEnd) {
5166 // Beyond the end of the stream, or our best guess of the end of the stream.
5167 return null;
5168 }
5169
5170 if (dateTimeObject > lastSegmentStart) {
5171 segment = lastSegment;
5172 }
5173
5174 return {
5175 segment: segment,
5176 estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : Playlist.duration(playlist, playlist.mediaSequence + playlist.segments.indexOf(segment)),
5177 // Although, given that all segments have accurate date time objects, the segment
5178 // selected should be accurate, unless the video has been transmuxed at some point
5179 // (determined by the presence of the videoTimingInfo object), the segment's "player
5180 // time" (the start time in the player) can't be considered accurate.
5181 type: segment.videoTimingInfo ? 'accurate' : 'estimate'
5182 };
5183 };
5184 /**
5185 * Finds a segment that contains the given player time(in seconds).
5186 *
5187 * @param {number} time The player time to find a match for
5188 * @param {Object} playlist A playlist object to search within
5189 */
5190
5191 var findSegmentForPlayerTime = function findSegmentForPlayerTime(time, playlist) {
5192 // Assumptions:
5193 // - there will always be a segment.duration
5194 // - we can start from zero
5195 // - segments are in time order
5196 if (!playlist || !playlist.segments || playlist.segments.length === 0) {
5197 return null;
5198 }
5199
5200 var segmentEnd = 0;
5201 var segment;
5202
5203 for (var i = 0; i < playlist.segments.length; i++) {
5204 segment = playlist.segments[i]; // videoTimingInfo is set after the segment is downloaded and transmuxed, and
5205 // should contain the most accurate values we have for the segment's player times.
5206 //
5207 // Use the accurate transmuxedPresentationEnd value if it is available, otherwise fall
5208 // back to an estimate based on the manifest derived (inaccurate) segment.duration, to
5209 // calculate an end value.
5210
5211 segmentEnd = segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationEnd : segmentEnd + segment.duration;
5212
5213 if (time <= segmentEnd) {
5214 break;
5215 }
5216 }
5217
5218 var lastSegment = playlist.segments[playlist.segments.length - 1];
5219
5220 if (lastSegment.videoTimingInfo && lastSegment.videoTimingInfo.transmuxedPresentationEnd < time) {
5221 // The time requested is beyond the stream end.
5222 return null;
5223 }
5224
5225 if (time > segmentEnd) {
5226 // The time is within or beyond the last segment.
5227 //
5228 // Check to see if the time is beyond a reasonable guess of the end of the stream.
5229 if (time > segmentEnd + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT) {
5230 // Technically, because the duration value is only an estimate, the time may still
5231 // exist in the last segment, however, there isn't enough information to make even
5232 // a reasonable estimate.
5233 return null;
5234 }
5235
5236 segment = lastSegment;
5237 }
5238
5239 return {
5240 segment: segment,
5241 estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : segmentEnd - segment.duration,
5242 // Because videoTimingInfo is only set after transmux, it is the only way to get
5243 // accurate timing values.
5244 type: segment.videoTimingInfo ? 'accurate' : 'estimate'
5245 };
5246 };
5247 /**
5248 * Gives the offset of the comparisonTimestamp from the programTime timestamp in seconds.
5249 * If the offset returned is positive, the programTime occurs after the
5250 * comparisonTimestamp.
5251 * If the offset is negative, the programTime occurs before the comparisonTimestamp.
5252 *
5253 * @param {string} comparisonTimeStamp An ISO-8601 timestamp to compare against
5254 * @param {string} programTime The programTime as an ISO-8601 string
5255 * @return {number} offset
5256 */
5257
5258 var getOffsetFromTimestamp = function getOffsetFromTimestamp(comparisonTimeStamp, programTime) {
5259 var segmentDateTime;
5260 var programDateTime;
5261
5262 try {
5263 segmentDateTime = new Date(comparisonTimeStamp);
5264 programDateTime = new Date(programTime);
5265 } catch (e) {// TODO handle error
5266 }
5267
5268 var segmentTimeEpoch = segmentDateTime.getTime();
5269 var programTimeEpoch = programDateTime.getTime();
5270 return (programTimeEpoch - segmentTimeEpoch) / 1000;
5271 };
5272 /**
5273 * Checks that all segments in this playlist have programDateTime tags.
5274 *
5275 * @param {Object} playlist A playlist object
5276 */
5277
5278 var verifyProgramDateTimeTags = function verifyProgramDateTimeTags(playlist) {
5279 if (!playlist.segments || playlist.segments.length === 0) {
5280 return false;
5281 }
5282
5283 for (var i = 0; i < playlist.segments.length; i++) {
5284 var segment = playlist.segments[i];
5285
5286 if (!segment.dateTimeObject) {
5287 return false;
5288 }
5289 }
5290
5291 return true;
5292 };
5293 /**
5294 * Returns the programTime of the media given a playlist and a playerTime.
5295 * The playlist must have programDateTime tags for a programDateTime tag to be returned.
5296 * If the segments containing the time requested have not been buffered yet, an estimate
5297 * may be returned to the callback.
5298 *
5299 * @param {Object} args
5300 * @param {Object} args.playlist A playlist object to search within
5301 * @param {number} time A playerTime in seconds
5302 * @param {Function} callback(err, programTime)
5303 * @return {string} err.message A detailed error message
5304 * @return {Object} programTime
5305 * @return {number} programTime.mediaSeconds The streamTime in seconds
5306 * @return {string} programTime.programDateTime The programTime as an ISO-8601 String
5307 */
5308
5309 var getProgramTime = function getProgramTime(_ref) {
5310 var playlist = _ref.playlist,
5311 _ref$time = _ref.time,
5312 time = _ref$time === void 0 ? undefined : _ref$time,
5313 callback = _ref.callback;
5314
5315 if (!callback) {
5316 throw new Error('getProgramTime: callback must be provided');
5317 }
5318
5319 if (!playlist || time === undefined) {
5320 return callback({
5321 message: 'getProgramTime: playlist and time must be provided'
5322 });
5323 }
5324
5325 var matchedSegment = findSegmentForPlayerTime(time, playlist);
5326
5327 if (!matchedSegment) {
5328 return callback({
5329 message: 'valid programTime was not found'
5330 });
5331 }
5332
5333 if (matchedSegment.type === 'estimate') {
5334 return callback({
5335 message: 'Accurate programTime could not be determined.' + ' Please seek to e.seekTime and try again',
5336 seekTime: matchedSegment.estimatedStart
5337 });
5338 }
5339
5340 var programTimeObject = {
5341 mediaSeconds: time
5342 };
5343 var programTime = playerTimeToProgramTime(time, matchedSegment.segment);
5344
5345 if (programTime) {
5346 programTimeObject.programDateTime = programTime.toISOString();
5347 }
5348
5349 return callback(null, programTimeObject);
5350 };
  /**
   * Seeks in the player to a time that matches the given programTime ISO-8601 string.
   *
   * @param {Object} args
   * @param {string} args.programTime A programTime to seek to as an ISO-8601 String
   * @param {Object} args.playlist A playlist to look within
   * @param {number} args.retryCount The number of times to try for an accurate seek. Default is 2.
   * @param {Function} args.seekTo A method to perform a seek
   * @param {boolean} args.pauseAfterSeek Whether to end in a paused state after seeking. Default is true.
   * @param {Object} args.tech The tech to seek on
   * @param {Function} args.callback(err, newTime) A callback to return the new time to
   * @return {string} err.message A detailed error message
   * @return {number} newTime The exact time that was seeked to in seconds
   */

  var seekToProgramTime = function seekToProgramTime(_ref2) {
    var programTime = _ref2.programTime,
        playlist = _ref2.playlist,
        _ref2$retryCount = _ref2.retryCount,
        retryCount = _ref2$retryCount === void 0 ? 2 : _ref2$retryCount,
        seekTo = _ref2.seekTo,
        _ref2$pauseAfterSeek = _ref2.pauseAfterSeek,
        pauseAfterSeek = _ref2$pauseAfterSeek === void 0 ? true : _ref2$pauseAfterSeek,
        tech = _ref2.tech,
        callback = _ref2.callback;

    // The callback is the only way results are reported, so it is mandatory.
    if (!callback) {
      throw new Error('seekToProgramTime: callback must be provided');
    }

    if (typeof programTime === 'undefined' || !playlist || !seekTo) {
      return callback({
        message: 'seekToProgramTime: programTime, seekTo and playlist must be provided'
      });
    }

    // A live stream must have started playing before seekable content exists to
    // buffer into.
    if (!playlist.endList && !tech.hasStarted_) {
      return callback({
        message: 'player must be playing a live stream to start buffering'
      });
    }

    // Every segment needs an EXT-X-PROGRAM-DATE-TIME tag to map wall-clock times to
    // player times.
    if (!verifyProgramDateTimeTags(playlist)) {
      return callback({
        message: 'programDateTime tags must be provided in the manifest ' + playlist.resolvedUri
      });
    }

    var matchedSegment = findSegmentForProgramTime(programTime, playlist); // no match

    if (!matchedSegment) {
      return callback({
        message: programTime + " was not found in the stream"
      });
    }

    var segment = matchedSegment.segment;
    // Seconds between the segment's program-date-time anchor and the requested time.
    var mediaOffset = getOffsetFromTimestamp(segment.dateTimeObject, programTime);

    if (matchedSegment.type === 'estimate') {
      // An accurate match requires the segment to be buffered/transmuxed: seek near
      // the estimate to trigger buffering, then retry after the seek completes.
      // we've run out of retries
      if (retryCount === 0) {
        return callback({
          message: programTime + " is not buffered yet. Try again"
        });
      }

      seekTo(matchedSegment.estimatedStart + mediaOffset);
      tech.one('seeked', function () {
        seekToProgramTime({
          programTime: programTime,
          playlist: playlist,
          retryCount: retryCount - 1,
          seekTo: seekTo,
          pauseAfterSeek: pauseAfterSeek,
          tech: tech,
          callback: callback
        });
      });
      return;
    } // Since the segment.start value is determined from the buffered end or ending time
    // of the prior segment, the seekToTime doesn't need to account for any transmuxer
    // modifications.


    var seekToTime = segment.start + mediaOffset;

    var seekedCallback = function seekedCallback() {
      return callback(null, tech.currentTime());
    }; // listen for seeked event


    tech.one('seeked', seekedCallback); // pause before seeking as video.js will restore this state

    if (pauseAfterSeek) {
      tech.pause();
    }

    seekTo(seekToTime);
  };
5451
5452 /**
5453 * Loops through all supported media groups in master and calls the provided
5454 * callback for each group
5455 *
5456 * @param {Object} master
5457 * The parsed master manifest object
5458 * @param {string[]} groups
5459 * The media groups to call the callback for
5460 * @param {Function} callback
5461 * Callback to call for each media group
5462 */
5463 var forEachMediaGroup = function forEachMediaGroup(master, groups, callback) {
5464 groups.forEach(function (mediaType) {
5465 for (var groupKey in master.mediaGroups[mediaType]) {
5466 for (var labelKey in master.mediaGroups[mediaType][groupKey]) {
5467 var mediaProperties = master.mediaGroups[mediaType][groupKey][labelKey];
5468 callback(mediaProperties, mediaType, groupKey, labelKey);
5469 }
5470 }
5471 });
5472 };
5473
5474 /*! @name mpd-parser @version 0.21.1 @license Apache-2.0 */
5475
5476 var isObject = function isObject(obj) {
5477 return !!obj && typeof obj === 'object';
5478 };
5479
5480 var merge = function merge() {
5481 for (var _len = arguments.length, objects = new Array(_len), _key = 0; _key < _len; _key++) {
5482 objects[_key] = arguments[_key];
5483 }
5484
5485 return objects.reduce(function (result, source) {
5486 if (typeof source !== 'object') {
5487 return result;
5488 }
5489
5490 Object.keys(source).forEach(function (key) {
5491 if (Array.isArray(result[key]) && Array.isArray(source[key])) {
5492 result[key] = result[key].concat(source[key]);
5493 } else if (isObject(result[key]) && isObject(source[key])) {
5494 result[key] = merge(result[key], source[key]);
5495 } else {
5496 result[key] = source[key];
5497 }
5498 });
5499 return result;
5500 }, {});
5501 };
5502
5503 var values = function values(o) {
5504 return Object.keys(o).map(function (k) {
5505 return o[k];
5506 });
5507 };
5508
5509 var range = function range(start, end) {
5510 var result = [];
5511
5512 for (var i = start; i < end; i++) {
5513 result.push(i);
5514 }
5515
5516 return result;
5517 };
5518
5519 var flatten = function flatten(lists) {
5520 return lists.reduce(function (x, y) {
5521 return x.concat(y);
5522 }, []);
5523 };
5524
5525 var from = function from(list) {
5526 if (!list.length) {
5527 return [];
5528 }
5529
5530 var result = [];
5531
5532 for (var i = 0; i < list.length; i++) {
5533 result.push(list[i]);
5534 }
5535
5536 return result;
5537 };
5538
5539 var findIndexes = function findIndexes(l, key) {
5540 return l.reduce(function (a, e, i) {
5541 if (e[key]) {
5542 a.push(i);
5543 }
5544
5545 return a;
5546 }, []);
5547 };
5548 /**
5549 * Returns the first index that satisfies the matching function, or -1 if not found.
5550 *
5551 * Only necessary because of IE11 support.
5552 *
5553 * @param {Array} list - the list to search through
5554 * @param {Function} matchingFunction - the matching function
5555 *
5556 * @return {number} the matching index or -1 if not found
5557 */
5558
5559
5560 var findIndex = function findIndex(list, matchingFunction) {
5561 for (var i = 0; i < list.length; i++) {
5562 if (matchingFunction(list[i])) {
5563 return i;
5564 }
5565 }
5566
5567 return -1;
5568 };
5569 /**
5570 * Returns a union of the included lists provided each element can be identified by a key.
5571 *
5572 * @param {Array} list - list of lists to get the union of
5573 * @param {Function} keyFunction - the function to use as a key for each element
5574 *
5575 * @return {Array} the union of the arrays
5576 */
5577
5578
5579 var union = function union(lists, keyFunction) {
5580 return values(lists.reduce(function (acc, list) {
5581 list.forEach(function (el) {
5582 acc[keyFunction(el)] = el;
5583 });
5584 return acc;
5585 }, {}));
5586 };
5587
  // Error codes surfaced by the inlined mpd-parser when a DASH manifest cannot be
  // parsed or is missing required information.
  var errors = {
    INVALID_NUMBER_OF_PERIOD: 'INVALID_NUMBER_OF_PERIOD',
    DASH_EMPTY_MANIFEST: 'DASH_EMPTY_MANIFEST',
    DASH_INVALID_XML: 'DASH_INVALID_XML',
    NO_BASE_URL: 'NO_BASE_URL',
    MISSING_SEGMENT_INFORMATION: 'MISSING_SEGMENT_INFORMATION',
    SEGMENT_TIME_UNSPECIFIED: 'SEGMENT_TIME_UNSPECIFIED',
    UNSUPPORTED_UTC_TIMING_SCHEME: 'UNSUPPORTED_UTC_TIMING_SCHEME'
  };
5597 /**
5598 * @typedef {Object} SingleUri
5599 * @property {string} uri - relative location of segment
5600 * @property {string} resolvedUri - resolved location of segment
5601 * @property {Object} byterange - Object containing information on how to make byte range
5602 * requests following byte-range-spec per RFC2616.
5603 * @property {String} byterange.length - length of range request
5604 * @property {String} byterange.offset - byte offset of range request
5605 *
5606 * @see https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.35.1
5607 */
5608
5609 /**
5610 * Converts a URLType node (5.3.9.2.3 Table 13) to a segment object
5611 * that conforms to how m3u8-parser is structured
5612 *
5613 * @see https://github.com/videojs/m3u8-parser
5614 *
5615 * @param {string} baseUrl - baseUrl provided by <BaseUrl> nodes
5616 * @param {string} source - source url for segment
5617 * @param {string} range - optional range used for range calls,
5618 * follows RFC 2616, Clause 14.35.1
5619 * @return {SingleUri} full segment information transformed into a format similar
5620 * to m3u8-parser
5621 */
5622
5623 var urlTypeToSegment = function urlTypeToSegment(_ref) {
5624 var _ref$baseUrl = _ref.baseUrl,
5625 baseUrl = _ref$baseUrl === void 0 ? '' : _ref$baseUrl,
5626 _ref$source = _ref.source,
5627 source = _ref$source === void 0 ? '' : _ref$source,
5628 _ref$range = _ref.range,
5629 range = _ref$range === void 0 ? '' : _ref$range,
5630 _ref$indexRange = _ref.indexRange,
5631 indexRange = _ref$indexRange === void 0 ? '' : _ref$indexRange;
5632 var segment = {
5633 uri: source,
5634 resolvedUri: resolveUrl$1(baseUrl || '', source)
5635 };
5636
5637 if (range || indexRange) {
5638 var rangeStr = range ? range : indexRange;
5639 var ranges = rangeStr.split('-'); // default to parsing this as a BigInt if possible
5640
5641 var startRange = window.BigInt ? window.BigInt(ranges[0]) : parseInt(ranges[0], 10);
5642 var endRange = window.BigInt ? window.BigInt(ranges[1]) : parseInt(ranges[1], 10); // convert back to a number if less than MAX_SAFE_INTEGER
5643
5644 if (startRange < Number.MAX_SAFE_INTEGER && typeof startRange === 'bigint') {
5645 startRange = Number(startRange);
5646 }
5647
5648 if (endRange < Number.MAX_SAFE_INTEGER && typeof endRange === 'bigint') {
5649 endRange = Number(endRange);
5650 }
5651
5652 var length;
5653
5654 if (typeof endRange === 'bigint' || typeof startRange === 'bigint') {
5655 length = window.BigInt(endRange) - window.BigInt(startRange) + window.BigInt(1);
5656 } else {
5657 length = endRange - startRange + 1;
5658 }
5659
5660 if (typeof length === 'bigint' && length < Number.MAX_SAFE_INTEGER) {
5661 length = Number(length);
5662 } // byterange should be inclusive according to
5663 // RFC 2616, Clause 14.35.1
5664
5665
5666 segment.byterange = {
5667 length: length,
5668 offset: startRange
5669 };
5670 }
5671
5672 return segment;
5673 };
5674
5675 var byteRangeToString = function byteRangeToString(byterange) {
5676 // `endRange` is one less than `offset + length` because the HTTP range
5677 // header uses inclusive ranges
5678 var endRange;
5679
5680 if (typeof byterange.offset === 'bigint' || typeof byterange.length === 'bigint') {
5681 endRange = window.BigInt(byterange.offset) + window.BigInt(byterange.length) - window.BigInt(1);
5682 } else {
5683 endRange = byterange.offset + byterange.length - 1;
5684 }
5685
5686 return byterange.offset + "-" + endRange;
5687 };
5688 /**
5689 * parse the end number attribue that can be a string
5690 * number, or undefined.
5691 *
5692 * @param {string|number|undefined} endNumber
5693 * The end number attribute.
5694 *
5695 * @return {number|null}
5696 * The result of parsing the end number.
5697 */
5698
5699
5700 var parseEndNumber = function parseEndNumber(endNumber) {
5701 if (endNumber && typeof endNumber !== 'number') {
5702 endNumber = parseInt(endNumber, 10);
5703 }
5704
5705 if (isNaN(endNumber)) {
5706 return null;
5707 }
5708
5709 return endNumber;
5710 };
5711 /**
5712 * Functions for calculating the range of available segments in static and dynamic
5713 * manifests.
5714 */
5715
5716
5717 var segmentRange = {
5718 /**
5719 * Returns the entire range of available segments for a static MPD
5720 *
5721 * @param {Object} attributes
5722 * Inheritied MPD attributes
5723 * @return {{ start: number, end: number }}
5724 * The start and end numbers for available segments
5725 */
5726 static: function _static(attributes) {
5727 var duration = attributes.duration,
5728 _attributes$timescale = attributes.timescale,
5729 timescale = _attributes$timescale === void 0 ? 1 : _attributes$timescale,
5730 sourceDuration = attributes.sourceDuration,
5731 periodDuration = attributes.periodDuration;
5732 var endNumber = parseEndNumber(attributes.endNumber);
5733 var segmentDuration = duration / timescale;
5734
5735 if (typeof endNumber === 'number') {
5736 return {
5737 start: 0,
5738 end: endNumber
5739 };
5740 }
5741
5742 if (typeof periodDuration === 'number') {
5743 return {
5744 start: 0,
5745 end: periodDuration / segmentDuration
5746 };
5747 }
5748
5749 return {
5750 start: 0,
5751 end: sourceDuration / segmentDuration
5752 };
5753 },
5754
5755 /**
5756 * Returns the current live window range of available segments for a dynamic MPD
5757 *
5758 * @param {Object} attributes
5759 * Inheritied MPD attributes
5760 * @return {{ start: number, end: number }}
5761 * The start and end numbers for available segments
5762 */
5763 dynamic: function dynamic(attributes) {
5764 var NOW = attributes.NOW,
5765 clientOffset = attributes.clientOffset,
5766 availabilityStartTime = attributes.availabilityStartTime,
5767 _attributes$timescale2 = attributes.timescale,
5768 timescale = _attributes$timescale2 === void 0 ? 1 : _attributes$timescale2,
5769 duration = attributes.duration,
5770 _attributes$periodSta = attributes.periodStart,
5771 periodStart = _attributes$periodSta === void 0 ? 0 : _attributes$periodSta,
5772 _attributes$minimumUp = attributes.minimumUpdatePeriod,
5773 minimumUpdatePeriod = _attributes$minimumUp === void 0 ? 0 : _attributes$minimumUp,
5774 _attributes$timeShift = attributes.timeShiftBufferDepth,
5775 timeShiftBufferDepth = _attributes$timeShift === void 0 ? Infinity : _attributes$timeShift;
5776 var endNumber = parseEndNumber(attributes.endNumber); // clientOffset is passed in at the top level of mpd-parser and is an offset calculated
5777 // after retrieving UTC server time.
5778
5779 var now = (NOW + clientOffset) / 1000; // WC stands for Wall Clock.
5780 // Convert the period start time to EPOCH.
5781
5782 var periodStartWC = availabilityStartTime + periodStart; // Period end in EPOCH is manifest's retrieval time + time until next update.
5783
5784 var periodEndWC = now + minimumUpdatePeriod;
5785 var periodDuration = periodEndWC - periodStartWC;
5786 var segmentCount = Math.ceil(periodDuration * timescale / duration);
5787 var availableStart = Math.floor((now - periodStartWC - timeShiftBufferDepth) * timescale / duration);
5788 var availableEnd = Math.floor((now - periodStartWC) * timescale / duration);
5789 return {
5790 start: Math.max(0, availableStart),
5791 end: typeof endNumber === 'number' ? endNumber : Math.min(segmentCount, availableEnd)
5792 };
5793 }
5794 };
5795 /**
5796 * Maps a range of numbers to objects with information needed to build the corresponding
5797 * segment list
5798 *
5799 * @name toSegmentsCallback
5800 * @function
5801 * @param {number} number
5802 * Number of the segment
5803 * @param {number} index
5804 * Index of the number in the range list
5805 * @return {{ number: Number, duration: Number, timeline: Number, time: Number }}
5806 * Object with segment timing and duration info
5807 */
5808
5809 /**
5810 * Returns a callback for Array.prototype.map for mapping a range of numbers to
5811 * information needed to build the segment list.
5812 *
5813 * @param {Object} attributes
5814 * Inherited MPD attributes
5815 * @return {toSegmentsCallback}
5816 * Callback map function
5817 */
5818
5819 var toSegments = function toSegments(attributes) {
5820 return function (number) {
5821 var duration = attributes.duration,
5822 _attributes$timescale3 = attributes.timescale,
5823 timescale = _attributes$timescale3 === void 0 ? 1 : _attributes$timescale3,
5824 periodStart = attributes.periodStart,
5825 _attributes$startNumb = attributes.startNumber,
5826 startNumber = _attributes$startNumb === void 0 ? 1 : _attributes$startNumb;
5827 return {
5828 number: startNumber + number,
5829 duration: duration / timescale,
5830 timeline: periodStart,
5831 time: number * duration
5832 };
5833 };
5834 };
5835 /**
5836 * Returns a list of objects containing segment timing and duration info used for
5837 * building the list of segments. This uses the @duration attribute specified
5838 * in the MPD manifest to derive the range of segments.
5839 *
5840 * @param {Object} attributes
5841 * Inherited MPD attributes
5842 * @return {{number: number, duration: number, time: number, timeline: number}[]}
5843 * List of Objects with segment timing and duration info
5844 */
5845
5846
5847 var parseByDuration = function parseByDuration(attributes) {
5848 var type = attributes.type,
5849 duration = attributes.duration,
5850 _attributes$timescale4 = attributes.timescale,
5851 timescale = _attributes$timescale4 === void 0 ? 1 : _attributes$timescale4,
5852 periodDuration = attributes.periodDuration,
5853 sourceDuration = attributes.sourceDuration;
5854
5855 var _segmentRange$type = segmentRange[type](attributes),
5856 start = _segmentRange$type.start,
5857 end = _segmentRange$type.end;
5858
5859 var segments = range(start, end).map(toSegments(attributes));
5860
5861 if (type === 'static') {
5862 var index = segments.length - 1; // section is either a period or the full source
5863
5864 var sectionDuration = typeof periodDuration === 'number' ? periodDuration : sourceDuration; // final segment may be less than full segment duration
5865
5866 segments[index].duration = sectionDuration - duration / timescale * index;
5867 }
5868
5869 return segments;
5870 };
5871 /**
5872 * Translates SegmentBase into a set of segments.
5873 * (DASH SPEC Section 5.3.9.3.2) contains a set of <SegmentURL> nodes. Each
5874 * node should be translated into segment.
5875 *
5876 * @param {Object} attributes
5877 * Object containing all inherited attributes from parent elements with attribute
5878 * names as keys
5879 * @return {Object.<Array>} list of segments
5880 */
5881
5882
5883 var segmentsFromBase = function segmentsFromBase(attributes) {
5884 var baseUrl = attributes.baseUrl,
5885 _attributes$initializ = attributes.initialization,
5886 initialization = _attributes$initializ === void 0 ? {} : _attributes$initializ,
5887 sourceDuration = attributes.sourceDuration,
5888 _attributes$indexRang = attributes.indexRange,
5889 indexRange = _attributes$indexRang === void 0 ? '' : _attributes$indexRang,
5890 periodStart = attributes.periodStart,
5891 presentationTime = attributes.presentationTime,
5892 _attributes$number = attributes.number,
5893 number = _attributes$number === void 0 ? 0 : _attributes$number,
5894 duration = attributes.duration; // base url is required for SegmentBase to work, per spec (Section 5.3.9.2.1)
5895
5896 if (!baseUrl) {
5897 throw new Error(errors.NO_BASE_URL);
5898 }
5899
5900 var initSegment = urlTypeToSegment({
5901 baseUrl: baseUrl,
5902 source: initialization.sourceURL,
5903 range: initialization.range
5904 });
5905 var segment = urlTypeToSegment({
5906 baseUrl: baseUrl,
5907 source: baseUrl,
5908 indexRange: indexRange
5909 });
5910 segment.map = initSegment; // If there is a duration, use it, otherwise use the given duration of the source
5911 // (since SegmentBase is only for one total segment)
5912
5913 if (duration) {
5914 var segmentTimeInfo = parseByDuration(attributes);
5915
5916 if (segmentTimeInfo.length) {
5917 segment.duration = segmentTimeInfo[0].duration;
5918 segment.timeline = segmentTimeInfo[0].timeline;
5919 }
5920 } else if (sourceDuration) {
5921 segment.duration = sourceDuration;
5922 segment.timeline = periodStart;
5923 } // If presentation time is provided, these segments are being generated by SIDX
5924 // references, and should use the time provided. For the general case of SegmentBase,
5925 // there should only be one segment in the period, so its presentation time is the same
5926 // as its period start.
5927
5928
5929 segment.presentationTime = presentationTime || periodStart;
5930 segment.number = number;
5931 return [segment];
5932 };
5933 /**
5934 * Given a playlist, a sidx box, and a baseUrl, update the segment list of the playlist
5935 * according to the sidx information given.
5936 *
5937 * playlist.sidx has metadadata about the sidx where-as the sidx param
5938 * is the parsed sidx box itself.
5939 *
5940 * @param {Object} playlist the playlist to update the sidx information for
5941 * @param {Object} sidx the parsed sidx box
5942 * @return {Object} the playlist object with the updated sidx information
5943 */
5944
5945
5946 var addSidxSegmentsToPlaylist$1 = function addSidxSegmentsToPlaylist(playlist, sidx, baseUrl) {
5947 // Retain init segment information
5948 var initSegment = playlist.sidx.map ? playlist.sidx.map : null; // Retain source duration from initial main manifest parsing
5949
5950 var sourceDuration = playlist.sidx.duration; // Retain source timeline
5951
5952 var timeline = playlist.timeline || 0;
5953 var sidxByteRange = playlist.sidx.byterange;
5954 var sidxEnd = sidxByteRange.offset + sidxByteRange.length; // Retain timescale of the parsed sidx
5955
5956 var timescale = sidx.timescale; // referenceType 1 refers to other sidx boxes
5957
5958 var mediaReferences = sidx.references.filter(function (r) {
5959 return r.referenceType !== 1;
5960 });
5961 var segments = [];
5962 var type = playlist.endList ? 'static' : 'dynamic';
5963 var periodStart = playlist.sidx.timeline;
5964 var presentationTime = periodStart;
5965 var number = playlist.mediaSequence || 0; // firstOffset is the offset from the end of the sidx box
5966
5967 var startIndex; // eslint-disable-next-line
5968
5969 if (typeof sidx.firstOffset === 'bigint') {
5970 startIndex = window.BigInt(sidxEnd) + sidx.firstOffset;
5971 } else {
5972 startIndex = sidxEnd + sidx.firstOffset;
5973 }
5974
5975 for (var i = 0; i < mediaReferences.length; i++) {
5976 var reference = sidx.references[i]; // size of the referenced (sub)segment
5977
5978 var size = reference.referencedSize; // duration of the referenced (sub)segment, in the timescale
5979 // this will be converted to seconds when generating segments
5980
5981 var duration = reference.subsegmentDuration; // should be an inclusive range
5982
5983 var endIndex = void 0; // eslint-disable-next-line
5984
5985 if (typeof startIndex === 'bigint') {
5986 endIndex = startIndex + window.BigInt(size) - window.BigInt(1);
5987 } else {
5988 endIndex = startIndex + size - 1;
5989 }
5990
5991 var indexRange = startIndex + "-" + endIndex;
5992 var attributes = {
5993 baseUrl: baseUrl,
5994 timescale: timescale,
5995 timeline: timeline,
5996 periodStart: periodStart,
5997 presentationTime: presentationTime,
5998 number: number,
5999 duration: duration,
6000 sourceDuration: sourceDuration,
6001 indexRange: indexRange,
6002 type: type
6003 };
6004 var segment = segmentsFromBase(attributes)[0];
6005
6006 if (initSegment) {
6007 segment.map = initSegment;
6008 }
6009
6010 segments.push(segment);
6011
6012 if (typeof startIndex === 'bigint') {
6013 startIndex += window.BigInt(size);
6014 } else {
6015 startIndex += size;
6016 }
6017
6018 presentationTime += duration / timescale;
6019 number++;
6020 }
6021
6022 playlist.segments = segments;
6023 return playlist;
6024 };
6025
  // Media group categories that may carry their own alternate playlists.
  var SUPPORTED_MEDIA_TYPES = ['AUDIO', 'SUBTITLES']; // allow one 60fps frame as leniency (arbitrarily chosen)

  var TIME_FUDGE = 1 / 60;
6029 /**
6030 * Given a list of timelineStarts, combines, dedupes, and sorts them.
6031 *
6032 * @param {TimelineStart[]} timelineStarts - list of timeline starts
6033 *
6034 * @return {TimelineStart[]} the combined and deduped timeline starts
6035 */
6036
6037 var getUniqueTimelineStarts = function getUniqueTimelineStarts(timelineStarts) {
6038 return union(timelineStarts, function (_ref) {
6039 var timeline = _ref.timeline;
6040 return timeline;
6041 }).sort(function (a, b) {
6042 return a.timeline > b.timeline ? 1 : -1;
6043 });
6044 };
6045 /**
6046 * Finds the playlist with the matching NAME attribute.
6047 *
6048 * @param {Array} playlists - playlists to search through
6049 * @param {string} name - the NAME attribute to search for
6050 *
6051 * @return {Object|null} the matching playlist object, or null
6052 */
6053
6054
6055 var findPlaylistWithName = function findPlaylistWithName(playlists, name) {
6056 for (var i = 0; i < playlists.length; i++) {
6057 if (playlists[i].attributes.NAME === name) {
6058 return playlists[i];
6059 }
6060 }
6061
6062 return null;
6063 };
6064 /**
6065 * Gets a flattened array of media group playlists.
6066 *
6067 * @param {Object} manifest - the main manifest object
6068 *
6069 * @return {Array} the media group playlists
6070 */
6071
6072
6073 var getMediaGroupPlaylists = function getMediaGroupPlaylists(manifest) {
6074 var mediaGroupPlaylists = [];
6075 forEachMediaGroup(manifest, SUPPORTED_MEDIA_TYPES, function (properties, type, group, label) {
6076 mediaGroupPlaylists = mediaGroupPlaylists.concat(properties.playlists || []);
6077 });
6078 return mediaGroupPlaylists;
6079 };
6080 /**
6081 * Updates the playlist's media sequence numbers.
6082 *
6083 * @param {Object} config - options object
6084 * @param {Object} config.playlist - the playlist to update
6085 * @param {number} config.mediaSequence - the mediaSequence number to start with
6086 */
6087
6088
6089 var updateMediaSequenceForPlaylist = function updateMediaSequenceForPlaylist(_ref2) {
6090 var playlist = _ref2.playlist,
6091 mediaSequence = _ref2.mediaSequence;
6092 playlist.mediaSequence = mediaSequence;
6093 playlist.segments.forEach(function (segment, index) {
6094 segment.number = playlist.mediaSequence + index;
6095 });
6096 };
6097 /**
6098 * Updates the media and discontinuity sequence numbers of newPlaylists given oldPlaylists
6099 * and a complete list of timeline starts.
6100 *
6101 * If no matching playlist is found, only the discontinuity sequence number of the playlist
6102 * will be updated.
6103 *
6104 * Since early available timelines are not supported, at least one segment must be present.
6105 *
6106 * @param {Object} config - options object
6107 * @param {Object[]} oldPlaylists - the old playlists to use as a reference
6108 * @param {Object[]} newPlaylists - the new playlists to update
6109 * @param {Object} timelineStarts - all timelineStarts seen in the stream to this point
6110 */
6111
6112
  // Aligns each new playlist's discontinuitySequence and mediaSequence with the
  // prior refresh, so that segment numbering and discontinuity accounting stay
  // continuous across live manifest updates.
  var updateSequenceNumbers = function updateSequenceNumbers(_ref3) {
    var oldPlaylists = _ref3.oldPlaylists,
        newPlaylists = _ref3.newPlaylists,
        timelineStarts = _ref3.timelineStarts;
    newPlaylists.forEach(function (playlist) {
      // discontinuity sequence = index of this playlist's timeline within all
      // timeline starts seen so far
      playlist.discontinuitySequence = findIndex(timelineStarts, function (_ref4) {
        var timeline = _ref4.timeline;
        return timeline === playlist.timeline;
      }); // Playlists NAMEs come from DASH Representation IDs, which are mandatory
      // (see ISO_23009-1-2012 5.3.5.2).
      //
      // If the same Representation existed in a prior Period, it will retain the same NAME.

      var oldPlaylist = findPlaylistWithName(oldPlaylists, playlist.attributes.NAME);

      if (!oldPlaylist) {
        // Since this is a new playlist, the media sequence values can start from 0 without
        // consequence.
        return;
      } // TODO better support for live SIDX
      //
      // As of this writing, mpd-parser does not support multiperiod SIDX (in live or VOD).
      // This is evident by a playlist only having a single SIDX reference. In a multiperiod
      // playlist there would need to be multiple SIDX references. In addition, live SIDX is
      // not supported when the SIDX properties change on refreshes.
      //
      // In the future, if support needs to be added, the merging logic here can be called
      // after SIDX references are resolved. For now, exit early to prevent exceptions being
      // thrown due to undefined references.


      if (playlist.sidx) {
        return;
      } // Since we don't yet support early available timelines, we don't need to support
      // playlists with no segments.


      var firstNewSegment = playlist.segments[0];
      // match on presentation time (within one 60fps frame of fudge) rather than
      // on segment number, since numbers are reassigned on refresh
      var oldMatchingSegmentIndex = findIndex(oldPlaylist.segments, function (oldSegment) {
        return Math.abs(oldSegment.presentationTime - firstNewSegment.presentationTime) < TIME_FUDGE;
      }); // No matching segment from the old playlist means the entire playlist was refreshed.
      // In this case the media sequence should account for this update, and the new segments
      // should be marked as discontinuous from the prior content, since the last prior
      // timeline was removed.

      if (oldMatchingSegmentIndex === -1) {
        updateMediaSequenceForPlaylist({
          playlist: playlist,
          mediaSequence: oldPlaylist.mediaSequence + oldPlaylist.segments.length
        });
        playlist.segments[0].discontinuity = true;
        playlist.discontinuityStarts.unshift(0); // No matching segment does not necessarily mean there's missing content.
        //
        // If the new playlist's timeline is the same as the last seen segment's timeline,
        // then a discontinuity can be added to identify that there's potentially missing
        // content. If there's no missing content, the discontinuity should still be rather
        // harmless. It's possible that if segment durations are accurate enough, that the
        // existence of a gap can be determined using the presentation times and durations,
        // but if the segment timing info is off, it may introduce more problems than simply
        // adding the discontinuity.
        //
        // If the new playlist's timeline is different from the last seen segment's timeline,
        // then a discontinuity can be added to identify that this is the first seen segment
        // of a new timeline. However, the logic at the start of this function that
        // determined the disconinuity sequence by timeline index is now off by one (the
        // discontinuity of the newest timeline hasn't yet fallen off the manifest...since
        // we added it), so the disconinuity sequence must be decremented.
        //
        // A period may also have a duration of zero, so the case of no segments is handled
        // here even though we don't yet support early available periods.

        if (!oldPlaylist.segments.length && playlist.timeline > oldPlaylist.timeline || oldPlaylist.segments.length && playlist.timeline > oldPlaylist.segments[oldPlaylist.segments.length - 1].timeline) {
          playlist.discontinuitySequence--;
        }

        return;
      } // If the first segment matched with a prior segment on a discontinuity (it's matching
      // on the first segment of a period), then the discontinuitySequence shouldn't be the
      // timeline's matching one, but instead should be the one prior, and the first segment
      // of the new manifest should be marked with a discontinuity.
      //
      // The reason for this special case is that discontinuity sequence shows how many
      // discontinuities have fallen off of the playlist, and discontinuities are marked on
      // the first segment of a new "timeline." Because of this, while DASH will retain that
      // Period while the "timeline" exists, HLS keeps track of it via the discontinuity
      // sequence, and that first segment is an indicator, but can be removed before that
      // timeline is gone.


      var oldMatchingSegment = oldPlaylist.segments[oldMatchingSegmentIndex];

      if (oldMatchingSegment.discontinuity && !firstNewSegment.discontinuity) {
        firstNewSegment.discontinuity = true;
        playlist.discontinuityStarts.unshift(0);
        playlist.discontinuitySequence--;
      }

      // resume media sequence numbering from the matched old segment's number
      updateMediaSequenceForPlaylist({
        playlist: playlist,
        mediaSequence: oldPlaylist.segments[oldMatchingSegmentIndex].number
      });
    });
  };
6216 /**
6217 * Given an old parsed manifest object and a new parsed manifest object, updates the
6218 * sequence and timing values within the new manifest to ensure that it lines up with the
6219 * old.
6220 *
6221 * @param {Array} oldManifest - the old main manifest object
6222 * @param {Array} newManifest - the new main manifest object
6223 *
6224 * @return {Object} the updated new manifest object
6225 */
6226
6227
6228 var positionManifestOnTimeline = function positionManifestOnTimeline(_ref5) {
6229 var oldManifest = _ref5.oldManifest,
6230 newManifest = _ref5.newManifest; // Starting from v4.1.2 of the IOP, section 4.4.3.3 states:
6231 //
6232 // "MPD@availabilityStartTime and Period@start shall not be changed over MPD updates."
6233 //
6234 // This was added from https://github.com/Dash-Industry-Forum/DASH-IF-IOP/issues/160
6235 //
6236 // Because of this change, and the difficulty of supporting periods with changing start
6237 // times, periods with changing start times are not supported. This makes the logic much
6238 // simpler, since periods with the same start time can be considerred the same period
6239 // across refreshes.
6240 //
6241 // To give an example as to the difficulty of handling periods where the start time may
6242 // change, if a single period manifest is refreshed with another manifest with a single
6243 // period, and both the start and end times are increased, then the only way to determine
6244 // if it's a new period or an old one that has changed is to look through the segments of
6245 // each playlist and determine the presentation time bounds to find a match. In addition,
6246 // if the period start changed to exceed the old period end, then there would be no
6247 // match, and it would not be possible to determine whether the refreshed period is a new
6248 // one or the old one.
6249
6250 var oldPlaylists = oldManifest.playlists.concat(getMediaGroupPlaylists(oldManifest));
6251 var newPlaylists = newManifest.playlists.concat(getMediaGroupPlaylists(newManifest)); // Save all seen timelineStarts to the new manifest. Although this potentially means that
6252 // there's a "memory leak" in that it will never stop growing, in reality, only a couple
6253 // of properties are saved for each seen Period. Even long running live streams won't
6254 // generate too many Periods, unless the stream is watched for decades. In the future,
6255 // this can be optimized by mapping to discontinuity sequence numbers for each timeline,
6256 // but it may not become an issue, and the additional info can be useful for debugging.
6257
6258 newManifest.timelineStarts = getUniqueTimelineStarts([oldManifest.timelineStarts, newManifest.timelineStarts]);
6259 updateSequenceNumbers({
6260 oldPlaylists: oldPlaylists,
6261 newPlaylists: newPlaylists,
6262 timelineStarts: newManifest.timelineStarts
6263 });
6264 return newManifest;
6265 };
6266
6267 var generateSidxKey = function generateSidxKey(sidx) {
6268 return sidx && sidx.uri + '-' + byteRangeToString(sidx.byterange);
6269 };
6270
6271 var mergeDiscontiguousPlaylists = function mergeDiscontiguousPlaylists(playlists) {
6272 var mergedPlaylists = values(playlists.reduce(function (acc, playlist) {
6273 // assuming playlist IDs are the same across periods
6274 // TODO: handle multiperiod where representation sets are not the same
6275 // across periods
6276 var name = playlist.attributes.id + (playlist.attributes.lang || '');
6277
6278 if (!acc[name]) {
6279 // First Period
6280 acc[name] = playlist;
6281 acc[name].attributes.timelineStarts = [];
6282 } else {
6283 // Subsequent Periods
6284 if (playlist.segments) {
6285 var _acc$name$segments; // first segment of subsequent periods signal a discontinuity
6286
6287
6288 if (playlist.segments[0]) {
6289 playlist.segments[0].discontinuity = true;
6290 }
6291
6292 (_acc$name$segments = acc[name].segments).push.apply(_acc$name$segments, playlist.segments);
6293 } // bubble up contentProtection, this assumes all DRM content
6294 // has the same contentProtection
6295
6296
6297 if (playlist.attributes.contentProtection) {
6298 acc[name].attributes.contentProtection = playlist.attributes.contentProtection;
6299 }
6300 }
6301
6302 acc[name].attributes.timelineStarts.push({
6303 // Although they represent the same number, it's important to have both to make it
6304 // compatible with HLS potentially having a similar attribute.
6305 start: playlist.attributes.periodStart,
6306 timeline: playlist.attributes.periodStart
6307 });
6308 return acc;
6309 }, {}));
6310 return mergedPlaylists.map(function (playlist) {
6311 playlist.discontinuityStarts = findIndexes(playlist.segments || [], 'discontinuity');
6312 return playlist;
6313 });
6314 };
6315
6316 var addSidxSegmentsToPlaylist = function addSidxSegmentsToPlaylist(playlist, sidxMapping) {
6317 var sidxKey = generateSidxKey(playlist.sidx);
6318 var sidxMatch = sidxKey && sidxMapping[sidxKey] && sidxMapping[sidxKey].sidx;
6319
6320 if (sidxMatch) {
6321 addSidxSegmentsToPlaylist$1(playlist, sidxMatch, playlist.sidx.resolvedUri);
6322 }
6323
6324 return playlist;
6325 };
6326
6327 var addSidxSegmentsToPlaylists = function addSidxSegmentsToPlaylists(playlists, sidxMapping) {
6328 if (sidxMapping === void 0) {
6329 sidxMapping = {};
6330 }
6331
6332 if (!Object.keys(sidxMapping).length) {
6333 return playlists;
6334 }
6335
6336 for (var i in playlists) {
6337 playlists[i] = addSidxSegmentsToPlaylist(playlists[i], sidxMapping);
6338 }
6339
6340 return playlists;
6341 };
6342
6343 var formatAudioPlaylist = function formatAudioPlaylist(_ref, isAudioOnly) {
6344 var _attributes;
6345
6346 var attributes = _ref.attributes,
6347 segments = _ref.segments,
6348 sidx = _ref.sidx,
6349 mediaSequence = _ref.mediaSequence,
6350 discontinuitySequence = _ref.discontinuitySequence,
6351 discontinuityStarts = _ref.discontinuityStarts;
6352 var playlist = {
6353 attributes: (_attributes = {
6354 NAME: attributes.id,
6355 BANDWIDTH: attributes.bandwidth,
6356 CODECS: attributes.codecs
6357 }, _attributes['PROGRAM-ID'] = 1, _attributes),
6358 uri: '',
6359 endList: attributes.type === 'static',
6360 timeline: attributes.periodStart,
6361 resolvedUri: '',
6362 targetDuration: attributes.duration,
6363 discontinuitySequence: discontinuitySequence,
6364 discontinuityStarts: discontinuityStarts,
6365 timelineStarts: attributes.timelineStarts,
6366 mediaSequence: mediaSequence,
6367 segments: segments
6368 };
6369
6370 if (attributes.contentProtection) {
6371 playlist.contentProtection = attributes.contentProtection;
6372 }
6373
6374 if (sidx) {
6375 playlist.sidx = sidx;
6376 }
6377
6378 if (isAudioOnly) {
6379 playlist.attributes.AUDIO = 'audio';
6380 playlist.attributes.SUBTITLES = 'subs';
6381 }
6382
6383 return playlist;
6384 };
6385
6386 var formatVttPlaylist = function formatVttPlaylist(_ref2) {
6387 var _m3u8Attributes;
6388
6389 var attributes = _ref2.attributes,
6390 segments = _ref2.segments,
6391 mediaSequence = _ref2.mediaSequence,
6392 discontinuityStarts = _ref2.discontinuityStarts,
6393 discontinuitySequence = _ref2.discontinuitySequence;
6394
6395 if (typeof segments === 'undefined') {
6396 // vtt tracks may use single file in BaseURL
6397 segments = [{
6398 uri: attributes.baseUrl,
6399 timeline: attributes.periodStart,
6400 resolvedUri: attributes.baseUrl || '',
6401 duration: attributes.sourceDuration,
6402 number: 0
6403 }]; // targetDuration should be the same duration as the only segment
6404
6405 attributes.duration = attributes.sourceDuration;
6406 }
6407
6408 var m3u8Attributes = (_m3u8Attributes = {
6409 NAME: attributes.id,
6410 BANDWIDTH: attributes.bandwidth
6411 }, _m3u8Attributes['PROGRAM-ID'] = 1, _m3u8Attributes);
6412
6413 if (attributes.codecs) {
6414 m3u8Attributes.CODECS = attributes.codecs;
6415 }
6416
6417 return {
6418 attributes: m3u8Attributes,
6419 uri: '',
6420 endList: attributes.type === 'static',
6421 timeline: attributes.periodStart,
6422 resolvedUri: attributes.baseUrl || '',
6423 targetDuration: attributes.duration,
6424 timelineStarts: attributes.timelineStarts,
6425 discontinuityStarts: discontinuityStarts,
6426 discontinuitySequence: discontinuitySequence,
6427 mediaSequence: mediaSequence,
6428 segments: segments
6429 };
6430 };
6431
6432 var organizeAudioPlaylists = function organizeAudioPlaylists(playlists, sidxMapping, isAudioOnly) {
6433 if (sidxMapping === void 0) {
6434 sidxMapping = {};
6435 }
6436
6437 if (isAudioOnly === void 0) {
6438 isAudioOnly = false;
6439 }
6440
6441 var mainPlaylist;
6442 var formattedPlaylists = playlists.reduce(function (a, playlist) {
6443 var role = playlist.attributes.role && playlist.attributes.role.value || '';
6444 var language = playlist.attributes.lang || '';
6445 var label = playlist.attributes.label || 'main';
6446
6447 if (language && !playlist.attributes.label) {
6448 var roleLabel = role ? " (" + role + ")" : '';
6449 label = "" + playlist.attributes.lang + roleLabel;
6450 }
6451
6452 if (!a[label]) {
6453 a[label] = {
6454 language: language,
6455 autoselect: true,
6456 default: role === 'main',
6457 playlists: [],
6458 uri: ''
6459 };
6460 }
6461
6462 var formatted = addSidxSegmentsToPlaylist(formatAudioPlaylist(playlist, isAudioOnly), sidxMapping);
6463 a[label].playlists.push(formatted);
6464
6465 if (typeof mainPlaylist === 'undefined' && role === 'main') {
6466 mainPlaylist = playlist;
6467 mainPlaylist.default = true;
6468 }
6469
6470 return a;
6471 }, {}); // if no playlists have role "main", mark the first as main
6472
6473 if (!mainPlaylist) {
6474 var firstLabel = Object.keys(formattedPlaylists)[0];
6475 formattedPlaylists[firstLabel].default = true;
6476 }
6477
6478 return formattedPlaylists;
6479 };
6480
6481 var organizeVttPlaylists = function organizeVttPlaylists(playlists, sidxMapping) {
6482 if (sidxMapping === void 0) {
6483 sidxMapping = {};
6484 }
6485
6486 return playlists.reduce(function (a, playlist) {
6487 var label = playlist.attributes.lang || 'text';
6488
6489 if (!a[label]) {
6490 a[label] = {
6491 language: label,
6492 default: false,
6493 autoselect: false,
6494 playlists: [],
6495 uri: ''
6496 };
6497 }
6498
6499 a[label].playlists.push(addSidxSegmentsToPlaylist(formatVttPlaylist(playlist), sidxMapping));
6500 return a;
6501 }, {});
6502 };
6503
6504 var organizeCaptionServices = function organizeCaptionServices(captionServices) {
6505 return captionServices.reduce(function (svcObj, svc) {
6506 if (!svc) {
6507 return svcObj;
6508 }
6509
6510 svc.forEach(function (service) {
6511 var channel = service.channel,
6512 language = service.language;
6513 svcObj[language] = {
6514 autoselect: false,
6515 default: false,
6516 instreamId: channel,
6517 language: language
6518 };
6519
6520 if (service.hasOwnProperty('aspectRatio')) {
6521 svcObj[language].aspectRatio = service.aspectRatio;
6522 }
6523
6524 if (service.hasOwnProperty('easyReader')) {
6525 svcObj[language].easyReader = service.easyReader;
6526 }
6527
6528 if (service.hasOwnProperty('3D')) {
6529 svcObj[language]['3D'] = service['3D'];
6530 }
6531 });
6532 return svcObj;
6533 }, {});
6534 };
6535
6536 var formatVideoPlaylist = function formatVideoPlaylist(_ref3) {
6537 var _attributes2;
6538
6539 var attributes = _ref3.attributes,
6540 segments = _ref3.segments,
6541 sidx = _ref3.sidx,
6542 discontinuityStarts = _ref3.discontinuityStarts;
6543 var playlist = {
6544 attributes: (_attributes2 = {
6545 NAME: attributes.id,
6546 AUDIO: 'audio',
6547 SUBTITLES: 'subs',
6548 RESOLUTION: {
6549 width: attributes.width,
6550 height: attributes.height
6551 },
6552 CODECS: attributes.codecs,
6553 BANDWIDTH: attributes.bandwidth
6554 }, _attributes2['PROGRAM-ID'] = 1, _attributes2),
6555 uri: '',
6556 endList: attributes.type === 'static',
6557 timeline: attributes.periodStart,
6558 resolvedUri: '',
6559 targetDuration: attributes.duration,
6560 discontinuityStarts: discontinuityStarts,
6561 timelineStarts: attributes.timelineStarts,
6562 segments: segments
6563 };
6564
6565 if (attributes.contentProtection) {
6566 playlist.contentProtection = attributes.contentProtection;
6567 }
6568
6569 if (sidx) {
6570 playlist.sidx = sidx;
6571 }
6572
6573 return playlist;
6574 };
6575
6576 var videoOnly = function videoOnly(_ref4) {
6577 var attributes = _ref4.attributes;
6578 return attributes.mimeType === 'video/mp4' || attributes.mimeType === 'video/webm' || attributes.contentType === 'video';
6579 };
6580
6581 var audioOnly = function audioOnly(_ref5) {
6582 var attributes = _ref5.attributes;
6583 return attributes.mimeType === 'audio/mp4' || attributes.mimeType === 'audio/webm' || attributes.contentType === 'audio';
6584 };
6585
6586 var vttOnly = function vttOnly(_ref6) {
6587 var attributes = _ref6.attributes;
6588 return attributes.mimeType === 'text/vtt' || attributes.contentType === 'text';
6589 };
6590 /**
6591 * Contains start and timeline properties denoting a timeline start. For DASH, these will
6592 * be the same number.
6593 *
6594 * @typedef {Object} TimelineStart
6595 * @property {number} start - the start time of the timeline
6596 * @property {number} timeline - the timeline number
6597 */
6598
6599 /**
6600 * Adds appropriate media and discontinuity sequence values to the segments and playlists.
6601 *
6602 * Throughout mpd-parser, the `number` attribute is used in relation to `startNumber`, a
6603 * DASH specific attribute used in constructing segment URI's from templates. However, from
6604 * an HLS perspective, the `number` attribute on a segment would be its `mediaSequence`
6605 * value, which should start at the original media sequence value (or 0) and increment by 1
6606 * for each segment thereafter. Since DASH's `startNumber` values are independent per
6607 * period, it doesn't make sense to use it for `number`. Instead, assume everything starts
6608 * from a 0 mediaSequence value and increment from there.
6609 *
6610 * Note that VHS currently doesn't use the `number` property, but it can be helpful for
6611 * debugging and making sense of the manifest.
6612 *
6613 * For live playlists, to account for values increasing in manifests when periods are
6614 * removed on refreshes, merging logic should be used to update the numbers to their
6615 * appropriate values (to ensure they're sequential and increasing).
6616 *
6617 * @param {Object[]} playlists - the playlists to update
6618 * @param {TimelineStart[]} timelineStarts - the timeline starts for the manifest
6619 */
6620
6621
6622 var addMediaSequenceValues = function addMediaSequenceValues(playlists, timelineStarts) {
6623 // increment all segments sequentially
6624 playlists.forEach(function (playlist) {
6625 playlist.mediaSequence = 0;
6626 playlist.discontinuitySequence = findIndex(timelineStarts, function (_ref7) {
6627 var timeline = _ref7.timeline;
6628 return timeline === playlist.timeline;
6629 });
6630
6631 if (!playlist.segments) {
6632 return;
6633 }
6634
6635 playlist.segments.forEach(function (segment, index) {
6636 segment.number = index;
6637 });
6638 });
6639 };
6640 /**
6641 * Given a media group object, flattens all playlists within the media group into a single
6642 * array.
6643 *
6644 * @param {Object} mediaGroupObject - the media group object
6645 *
6646 * @return {Object[]}
6647 * The media group playlists
6648 */
6649
6650
6651 var flattenMediaGroupPlaylists = function flattenMediaGroupPlaylists(mediaGroupObject) {
6652 if (!mediaGroupObject) {
6653 return [];
6654 }
6655
6656 return Object.keys(mediaGroupObject).reduce(function (acc, label) {
6657 var labelContents = mediaGroupObject[label];
6658 return acc.concat(labelContents.playlists);
6659 }, []);
6660 };
6661
  // Converts the flat list of parsed DASH playlists into an HLS-style (m3u8)
  // main manifest object: video playlists become top-level playlists, audio and
  // vtt playlists become media groups, and sequence/timeline values are wired
  // up (against the previous manifest when refreshing a live stream).
  var toM3u8 = function toM3u8(_ref8) {
    var _mediaGroups;

    var dashPlaylists = _ref8.dashPlaylists,
        locations = _ref8.locations,
        _ref8$sidxMapping = _ref8.sidxMapping,
        sidxMapping = _ref8$sidxMapping === void 0 ? {} : _ref8$sidxMapping,
        previousManifest = _ref8.previousManifest;

    if (!dashPlaylists.length) {
      return {};
    } // grab all main manifest attributes


    var _dashPlaylists$0$attr = dashPlaylists[0].attributes,
        duration = _dashPlaylists$0$attr.sourceDuration,
        type = _dashPlaylists$0$attr.type,
        suggestedPresentationDelay = _dashPlaylists$0$attr.suggestedPresentationDelay,
        minimumUpdatePeriod = _dashPlaylists$0$attr.minimumUpdatePeriod;
    // partition playlists by media type and merge periods per representation
    var videoPlaylists = mergeDiscontiguousPlaylists(dashPlaylists.filter(videoOnly)).map(formatVideoPlaylist);
    var audioPlaylists = mergeDiscontiguousPlaylists(dashPlaylists.filter(audioOnly));
    var vttPlaylists = mergeDiscontiguousPlaylists(dashPlaylists.filter(vttOnly));
    var captions = dashPlaylists.map(function (playlist) {
      return playlist.attributes.captionServices;
    }).filter(Boolean);
    // HLS-style main manifest skeleton
    var manifest = {
      allowCache: true,
      discontinuityStarts: [],
      segments: [],
      endList: true,
      mediaGroups: (_mediaGroups = {
        AUDIO: {},
        VIDEO: {}
      }, _mediaGroups['CLOSED-CAPTIONS'] = {}, _mediaGroups.SUBTITLES = {}, _mediaGroups),
      uri: '',
      duration: duration,
      playlists: addSidxSegmentsToPlaylists(videoPlaylists, sidxMapping)
    };

    if (minimumUpdatePeriod >= 0) {
      // scaled by 1000 -- presumably seconds from the parser to milliseconds
      // for consumers; confirm against the attribute parsing code
      manifest.minimumUpdatePeriod = minimumUpdatePeriod * 1000;
    }

    if (locations) {
      manifest.locations = locations;
    }

    if (type === 'dynamic') {
      manifest.suggestedPresentationDelay = suggestedPresentationDelay;
    }

    // no top-level (video) playlists means this is an audio-only presentation
    var isAudioOnly = manifest.playlists.length === 0;
    var organizedAudioGroup = audioPlaylists.length ? organizeAudioPlaylists(audioPlaylists, sidxMapping, isAudioOnly) : null;
    var organizedVttGroup = vttPlaylists.length ? organizeVttPlaylists(vttPlaylists, sidxMapping) : null;
    var formattedPlaylists = videoPlaylists.concat(flattenMediaGroupPlaylists(organizedAudioGroup), flattenMediaGroupPlaylists(organizedVttGroup));
    var playlistTimelineStarts = formattedPlaylists.map(function (_ref9) {
      var timelineStarts = _ref9.timelineStarts;
      return timelineStarts;
    });
    manifest.timelineStarts = getUniqueTimelineStarts(playlistTimelineStarts);
    addMediaSequenceValues(formattedPlaylists, manifest.timelineStarts);

    if (organizedAudioGroup) {
      manifest.mediaGroups.AUDIO.audio = organizedAudioGroup;
    }

    if (organizedVttGroup) {
      manifest.mediaGroups.SUBTITLES.subs = organizedVttGroup;
    }

    if (captions.length) {
      manifest.mediaGroups['CLOSED-CAPTIONS'].cc = organizeCaptionServices(captions);
    }

    // live refresh: line sequence and timing values up with the prior manifest
    if (previousManifest) {
      return positionManifestOnTimeline({
        oldManifest: previousManifest,
        newManifest: manifest
      });
    }

    return manifest;
  };
6745 /**
6746 * Calculates the R (repetition) value for a live stream (for the final segment
6747 * in a manifest where the r value is negative 1)
6748 *
6749 * @param {Object} attributes
6750 * Object containing all inherited attributes from parent elements with attribute
6751 * names as keys
6752 * @param {number} time
6753 * current time (typically the total time up until the final segment)
6754 * @param {number} duration
6755 * duration property for the given <S />
6756 *
6757 * @return {number}
6758 * R value to reach the end of the given period
6759 */
6760
6761
6762 var getLiveRValue = function getLiveRValue(attributes, time, duration) {
6763 var NOW = attributes.NOW,
6764 clientOffset = attributes.clientOffset,
6765 availabilityStartTime = attributes.availabilityStartTime,
6766 _attributes$timescale = attributes.timescale,
6767 timescale = _attributes$timescale === void 0 ? 1 : _attributes$timescale,
6768 _attributes$periodSta = attributes.periodStart,
6769 periodStart = _attributes$periodSta === void 0 ? 0 : _attributes$periodSta,
6770 _attributes$minimumUp = attributes.minimumUpdatePeriod,
6771 minimumUpdatePeriod = _attributes$minimumUp === void 0 ? 0 : _attributes$minimumUp;
6772 var now = (NOW + clientOffset) / 1000;
6773 var periodStartWC = availabilityStartTime + periodStart;
6774 var periodEndWC = now + minimumUpdatePeriod;
6775 var periodDuration = periodEndWC - periodStartWC;
6776 return Math.ceil((periodDuration * timescale - time) / duration);
6777 };
6778 /**
6779 * Uses information provided by SegmentTemplate.SegmentTimeline to determine segment
6780 * timing and duration
6781 *
6782 * @param {Object} attributes
6783 * Object containing all inherited attributes from parent elements with attribute
6784 * names as keys
6785 * @param {Object[]} segmentTimeline
6786 * List of objects representing the attributes of each S element contained within
6787 *
6788 * @return {{number: number, duration: number, time: number, timeline: number}[]}
6789 * List of Objects with segment timing and duration info
6790 */
6791
6792
6793 var parseByTimeline = function parseByTimeline(attributes, segmentTimeline) {
6794 var type = attributes.type,
6795 _attributes$minimumUp2 = attributes.minimumUpdatePeriod,
6796 minimumUpdatePeriod = _attributes$minimumUp2 === void 0 ? 0 : _attributes$minimumUp2,
6797 _attributes$media = attributes.media,
6798 media = _attributes$media === void 0 ? '' : _attributes$media,
6799 sourceDuration = attributes.sourceDuration,
6800 _attributes$timescale2 = attributes.timescale,
6801 timescale = _attributes$timescale2 === void 0 ? 1 : _attributes$timescale2,
6802 _attributes$startNumb = attributes.startNumber,
6803 startNumber = _attributes$startNumb === void 0 ? 1 : _attributes$startNumb,
6804 timeline = attributes.periodStart;
6805 var segments = [];
6806 var time = -1;
6807
6808 for (var sIndex = 0; sIndex < segmentTimeline.length; sIndex++) {
6809 var S = segmentTimeline[sIndex];
6810 var duration = S.d;
6811 var repeat = S.r || 0;
6812 var segmentTime = S.t || 0;
6813
6814 if (time < 0) {
6815 // first segment
6816 time = segmentTime;
6817 }
6818
6819 if (segmentTime && segmentTime > time) {
6820 // discontinuity
6821 // TODO: How to handle this type of discontinuity
6822 // timeline++ here would treat it like HLS discontuity and content would
6823 // get appended without gap
6824 // E.G.
6825 // <S t="0" d="1" />
6826 // <S d="1" />
6827 // <S d="1" />
6828 // <S t="5" d="1" />
6829 // would have $Time$ values of [0, 1, 2, 5]
6830 // should this be appened at time positions [0, 1, 2, 3],(#EXT-X-DISCONTINUITY)
6831 // or [0, 1, 2, gap, gap, 5]? (#EXT-X-GAP)
6832 // does the value of sourceDuration consider this when calculating arbitrary
6833 // negative @r repeat value?
6834 // E.G. Same elements as above with this added at the end
6835 // <S d="1" r="-1" />
6836 // with a sourceDuration of 10
6837 // Would the 2 gaps be included in the time duration calculations resulting in
6838 // 8 segments with $Time$ values of [0, 1, 2, 5, 6, 7, 8, 9] or 10 segments
6839 // with $Time$ values of [0, 1, 2, 5, 6, 7, 8, 9, 10, 11] ?
6840 time = segmentTime;
6841 }
6842
6843 var count = void 0;
6844
6845 if (repeat < 0) {
6846 var nextS = sIndex + 1;
6847
6848 if (nextS === segmentTimeline.length) {
6849 // last segment
6850 if (type === 'dynamic' && minimumUpdatePeriod > 0 && media.indexOf('$Number$') > 0) {
6851 count = getLiveRValue(attributes, time, duration);
6852 } else {
6853 // TODO: This may be incorrect depending on conclusion of TODO above
6854 count = (sourceDuration * timescale - time) / duration;
6855 }
6856 } else {
6857 count = (segmentTimeline[nextS].t - time) / duration;
6858 }
6859 } else {
6860 count = repeat + 1;
6861 }
6862
6863 var end = startNumber + segments.length + count;
6864 var number = startNumber + segments.length;
6865
6866 while (number < end) {
6867 segments.push({
6868 number: number,
6869 duration: duration / timescale,
6870 time: time,
6871 timeline: timeline
6872 });
6873 time += duration;
6874 number++;
6875 }
6876 }
6877
6878 return segments;
6879 };
6880
6881 var identifierPattern = /\$([A-z]*)(?:(%0)([0-9]+)d)?\$/g;
6882 /**
6883 * Replaces template identifiers with corresponding values. To be used as the callback
6884 * for String.prototype.replace
6885 *
6886 * @name replaceCallback
6887 * @function
6888 * @param {string} match
6889 * Entire match of identifier
6890 * @param {string} identifier
6891 * Name of matched identifier
6892 * @param {string} format
6893 * Format tag string. Its presence indicates that padding is expected
6894 * @param {string} width
6895 * Desired length of the replaced value. Values less than this width shall be left
6896 * zero padded
6897 * @return {string}
6898 * Replacement for the matched identifier
6899 */
6900
6901 /**
6902 * Returns a function to be used as a callback for String.prototype.replace to replace
6903 * template identifiers
6904 *
   * @param {Object} values
6906 * Object containing values that shall be used to replace known identifiers
6907 * @param {number} values.RepresentationID
6908 * Value of the Representation@id attribute
6909 * @param {number} values.Number
6910 * Number of the corresponding segment
6911 * @param {number} values.Bandwidth
6912 * Value of the Representation@bandwidth attribute.
6913 * @param {number} values.Time
6914 * Timestamp value of the corresponding segment
6915 * @return {replaceCallback}
6916 * Callback to be used with String.prototype.replace to replace identifiers
6917 */
6918
6919 var identifierReplacement = function identifierReplacement(values) {
6920 return function (match, identifier, format, width) {
6921 if (match === '$$') {
6922 // escape sequence
6923 return '$';
6924 }
6925
6926 if (typeof values[identifier] === 'undefined') {
6927 return match;
6928 }
6929
6930 var value = '' + values[identifier];
6931
6932 if (identifier === 'RepresentationID') {
6933 // Format tag shall not be present with RepresentationID
6934 return value;
6935 }
6936
6937 if (!format) {
6938 width = 1;
6939 } else {
6940 width = parseInt(width, 10);
6941 }
6942
6943 if (value.length >= width) {
6944 return value;
6945 }
6946
6947 return "" + new Array(width - value.length + 1).join('0') + value;
6948 };
6949 };
6950 /**
6951 * Constructs a segment url from a template string
6952 *
6953 * @param {string} url
6954 * Template string to construct url from
   * @param {Object} values
6956 * Object containing values that shall be used to replace known identifiers
6957 * @param {number} values.RepresentationID
6958 * Value of the Representation@id attribute
6959 * @param {number} values.Number
6960 * Number of the corresponding segment
6961 * @param {number} values.Bandwidth
6962 * Value of the Representation@bandwidth attribute.
6963 * @param {number} values.Time
6964 * Timestamp value of the corresponding segment
6965 * @return {string}
6966 * Segment url with identifiers replaced
6967 */
6968
6969
6970 var constructTemplateUrl = function constructTemplateUrl(url, values) {
6971 return url.replace(identifierPattern, identifierReplacement(values));
6972 };
6973 /**
6974 * Generates a list of objects containing timing and duration information about each
6975 * segment needed to generate segment uris and the complete segment object
6976 *
6977 * @param {Object} attributes
6978 * Object containing all inherited attributes from parent elements with attribute
6979 * names as keys
6980 * @param {Object[]|undefined} segmentTimeline
6981 * List of objects representing the attributes of each S element contained within
6982 * the SegmentTimeline element
6983 * @return {{number: number, duration: number, time: number, timeline: number}[]}
6984 * List of Objects with segment timing and duration info
6985 */
6986
6987
6988 var parseTemplateInfo = function parseTemplateInfo(attributes, segmentTimeline) {
6989 if (!attributes.duration && !segmentTimeline) {
6990 // if neither @duration or SegmentTimeline are present, then there shall be exactly
6991 // one media segment
6992 return [{
6993 number: attributes.startNumber || 1,
6994 duration: attributes.sourceDuration,
6995 time: 0,
6996 timeline: attributes.periodStart
6997 }];
6998 }
6999
7000 if (attributes.duration) {
7001 return parseByDuration(attributes);
7002 }
7003
7004 return parseByTimeline(attributes, segmentTimeline);
7005 };
7006 /**
7007 * Generates a list of segments using information provided by the SegmentTemplate element
7008 *
7009 * @param {Object} attributes
7010 * Object containing all inherited attributes from parent elements with attribute
7011 * names as keys
7012 * @param {Object[]|undefined} segmentTimeline
7013 * List of objects representing the attributes of each S element contained within
7014 * the SegmentTimeline element
7015 * @return {Object[]}
7016 * List of segment objects
7017 */
7018
7019
  var segmentsFromTemplate = function segmentsFromTemplate(attributes, segmentTimeline) {
    // Values that are constant for every segment produced by this template.
    var templateValues = {
      RepresentationID: attributes.id,
      Bandwidth: attributes.bandwidth || 0
    };
    var _attributes$initializ = attributes.initialization,
        initialization = _attributes$initializ === void 0 ? {
      sourceURL: '',
      range: ''
    } : _attributes$initializ;
    // The init segment's sourceURL may itself be templated (e.g. with
    // $RepresentationID$), so it is expanded with the constant values above.
    var mapSegment = urlTypeToSegment({
      baseUrl: attributes.baseUrl,
      source: constructTemplateUrl(initialization.sourceURL, templateValues),
      range: initialization.range
    });
    var segments = parseTemplateInfo(attributes, segmentTimeline);
    return segments.map(function (segment) {
      // $Number$ and $Time$ vary per segment; fill them in just before
      // expanding the media template for this segment.
      templateValues.Number = segment.number;
      templateValues.Time = segment.time;
      var uri = constructTemplateUrl(attributes.media || '', templateValues); // See DASH spec section 5.3.9.2.2
      // - if timescale isn't present on any level, default to 1.

      var timescale = attributes.timescale || 1; // - if presentationTimeOffset isn't present on any level, default to 0

      var presentationTimeOffset = attributes.presentationTimeOffset || 0;
      var presentationTime = // Even if the @t attribute is not specified for the segment, segment.time is
      // calculated in mpd-parser prior to this, so it's assumed to be available.
      attributes.periodStart + (segment.time - presentationTimeOffset) / timescale;
      var map = {
        uri: uri,
        timeline: segment.timeline,
        duration: segment.duration,
        resolvedUri: resolveUrl$1(attributes.baseUrl || '', uri),
        map: mapSegment,
        number: segment.number,
        presentationTime: presentationTime
      };
      return map;
    });
  };
7060 /**
7061 * Converts a <SegmentUrl> (of type URLType from the DASH spec 5.3.9.2 Table 14)
7062 * to an object that matches the output of a segment in videojs/mpd-parser
7063 *
7064 * @param {Object} attributes
7065 * Object containing all inherited attributes from parent elements with attribute
7066 * names as keys
7067 * @param {Object} segmentUrl
7068 * <SegmentURL> node to translate into a segment object
7069 * @return {Object} translated segment object
7070 */
7071
7072
7073 var SegmentURLToSegmentObject = function SegmentURLToSegmentObject(attributes, segmentUrl) {
7074 var baseUrl = attributes.baseUrl,
7075 _attributes$initializ = attributes.initialization,
7076 initialization = _attributes$initializ === void 0 ? {} : _attributes$initializ;
7077 var initSegment = urlTypeToSegment({
7078 baseUrl: baseUrl,
7079 source: initialization.sourceURL,
7080 range: initialization.range
7081 });
7082 var segment = urlTypeToSegment({
7083 baseUrl: baseUrl,
7084 source: segmentUrl.media,
7085 range: segmentUrl.mediaRange
7086 });
7087 segment.map = initSegment;
7088 return segment;
7089 };
7090 /**
7091 * Generates a list of segments using information provided by the SegmentList element
7092 * SegmentList (DASH SPEC Section 5.3.9.3.2) contains a set of <SegmentURL> nodes. Each
7093 * node should be translated into segment.
7094 *
7095 * @param {Object} attributes
7096 * Object containing all inherited attributes from parent elements with attribute
7097 * names as keys
7098 * @param {Object[]|undefined} segmentTimeline
7099 * List of objects representing the attributes of each S element contained within
7100 * the SegmentTimeline element
7101 * @return {Object.<Array>} list of segments
7102 */
7103
7104
  var segmentsFromList = function segmentsFromList(attributes, segmentTimeline) {
    var duration = attributes.duration,
        _attributes$segmentUr = attributes.segmentUrls,
        segmentUrls = _attributes$segmentUr === void 0 ? [] : _attributes$segmentUr,
        periodStart = attributes.periodStart; // Per spec (5.3.9.2.1) no way to determine segment duration OR
    // if both SegmentTimeline and @duration are defined, it is outside of spec.

    if (!duration && !segmentTimeline || duration && segmentTimeline) {
      throw new Error(errors.SEGMENT_TIME_UNSPECIFIED);
    }

    // Translate each <SegmentURL> node into a segment object carrying its uri
    // and initialization (map) segment.
    var segmentUrlMap = segmentUrls.map(function (segmentUrlObject) {
      return SegmentURLToSegmentObject(attributes, segmentUrlObject);
    });
    var segmentTimeInfo;

    // Exactly one of the two timing sources is present here; the guard above
    // rules out both and neither.
    if (duration) {
      segmentTimeInfo = parseByDuration(attributes);
    }

    if (segmentTimeline) {
      segmentTimeInfo = parseByTimeline(attributes, segmentTimeline);
    }

    var segments = segmentTimeInfo.map(function (segmentTime, index) {
      if (segmentUrlMap[index]) {
        var segment = segmentUrlMap[index]; // See DASH spec section 5.3.9.2.2
        // - if timescale isn't present on any level, default to 1.

        var timescale = attributes.timescale || 1; // - if presentationTimeOffset isn't present on any level, default to 0

        var presentationTimeOffset = attributes.presentationTimeOffset || 0;
        segment.timeline = segmentTime.timeline;
        segment.duration = segmentTime.duration;
        segment.number = segmentTime.number;
        segment.presentationTime = periodStart + (segmentTime.time - presentationTimeOffset) / timescale;
        return segment;
      } // Since we're mapping we should get rid of any blank segments (in case
      // the given SegmentTimeline is handling for more elements than we have
      // SegmentURLs for).

    }).filter(function (segment) {
      return segment;
    });
    return segments;
  };
7151
  /**
   * Generates the playlist-level attributes and segment list for a single
   * representation, dispatching on which type of segment information
   * (SegmentTemplate, SegmentBase, or SegmentList) is present.
   *
   * @param {Object} _ref
   * @param {Object} _ref.attributes
   *        Inherited attributes for the representation
   * @param {Object} _ref.segmentInfo
   *        Segment information for the representation
   * @return {Object}
   *         Object with `attributes`, `segments`, and possibly `sidx`
   */
  var generateSegments = function generateSegments(_ref) {
    var attributes = _ref.attributes,
        segmentInfo = _ref.segmentInfo;
    var segmentAttributes;
    var segmentsFn;

    if (segmentInfo.template) {
      segmentsFn = segmentsFromTemplate;
      segmentAttributes = merge(attributes, segmentInfo.template);
    } else if (segmentInfo.base) {
      segmentsFn = segmentsFromBase;
      segmentAttributes = merge(attributes, segmentInfo.base);
    } else if (segmentInfo.list) {
      segmentsFn = segmentsFromList;
      segmentAttributes = merge(attributes, segmentInfo.list);
    }

    var segmentsInfo = {
      attributes: attributes
    };

    // No recognized segment information: return the attributes unchanged and
    // no segments.
    if (!segmentsFn) {
      return segmentsInfo;
    }

    var segments = segmentsFn(segmentAttributes, segmentInfo.segmentTimeline); // The @duration attribute will be used to determine the playlist's targetDuration which
    // must be in seconds. Since we've generated the segment list, we no longer need
    // @duration to be in @timescale units, so we can convert it here.

    if (segmentAttributes.duration) {
      var _segmentAttributes = segmentAttributes,
          duration = _segmentAttributes.duration,
          _segmentAttributes$ti = _segmentAttributes.timescale,
          timescale = _segmentAttributes$ti === void 0 ? 1 : _segmentAttributes$ti;
      segmentAttributes.duration = duration / timescale;
    } else if (segments.length) {
      // if there is no @duration attribute, use the largest segment duration
      // as the target duration
      segmentAttributes.duration = segments.reduce(function (max, segment) {
        return Math.max(max, Math.ceil(segment.duration));
      }, 0);
    } else {
      segmentAttributes.duration = 0;
    }

    segmentsInfo.attributes = segmentAttributes;
    segmentsInfo.segments = segments; // This is a sidx box without actual segment information

    if (segmentInfo.base && segmentAttributes.indexRange) {
      // SegmentBase with @indexRange: the single generated "segment" actually
      // describes the sidx; real segments are derived later from the sidx box.
      segmentsInfo.sidx = segments[0];
      segmentsInfo.segments = [];
    }

    return segmentsInfo;
  };
7207
7208 var toPlaylists = function toPlaylists(representations) {
7209 return representations.map(generateSegments);
7210 };
7211
7212 var findChildren = function findChildren(element, name) {
7213 return from(element.childNodes).filter(function (_ref) {
7214 var tagName = _ref.tagName;
7215 return tagName === name;
7216 });
7217 };
7218
7219 var getContent = function getContent(element) {
7220 return element.textContent.trim();
7221 };
7222
7223 var parseDuration = function parseDuration(str) {
7224 var SECONDS_IN_YEAR = 365 * 24 * 60 * 60;
7225 var SECONDS_IN_MONTH = 30 * 24 * 60 * 60;
7226 var SECONDS_IN_DAY = 24 * 60 * 60;
7227 var SECONDS_IN_HOUR = 60 * 60;
7228 var SECONDS_IN_MIN = 60; // P10Y10M10DT10H10M10.1S
7229
7230 var durationRegex = /P(?:(\d*)Y)?(?:(\d*)M)?(?:(\d*)D)?(?:T(?:(\d*)H)?(?:(\d*)M)?(?:([\d.]*)S)?)?/;
7231 var match = durationRegex.exec(str);
7232
7233 if (!match) {
7234 return 0;
7235 }
7236
7237 var _match$slice = match.slice(1),
7238 year = _match$slice[0],
7239 month = _match$slice[1],
7240 day = _match$slice[2],
7241 hour = _match$slice[3],
7242 minute = _match$slice[4],
7243 second = _match$slice[5];
7244
7245 return parseFloat(year || 0) * SECONDS_IN_YEAR + parseFloat(month || 0) * SECONDS_IN_MONTH + parseFloat(day || 0) * SECONDS_IN_DAY + parseFloat(hour || 0) * SECONDS_IN_HOUR + parseFloat(minute || 0) * SECONDS_IN_MIN + parseFloat(second || 0);
7246 };
7247
7248 var parseDate = function parseDate(str) {
7249 // Date format without timezone according to ISO 8601
7250 // YYY-MM-DDThh:mm:ss.ssssss
7251 var dateRegex = /^\d+-\d+-\d+T\d+:\d+:\d+(\.\d+)?$/; // If the date string does not specifiy a timezone, we must specifiy UTC. This is
7252 // expressed by ending with 'Z'
7253
7254 if (dateRegex.test(str)) {
7255 str += 'Z';
7256 }
7257
7258 return Date.parse(str);
7259 };
7260
  var parsers = {
    /**
     * Specifies the duration of the entire Media Presentation. Format is a duration string
     * as specified in ISO 8601
     *
     * @param {string} value
     *        value of attribute as a string
     * @return {number}
     *         The duration in seconds
     */
    mediaPresentationDuration: function mediaPresentationDuration(value) {
      return parseDuration(value);
    },

    /**
     * Specifies the Segment availability start time for all Segments referred to in this
     * MPD. For a dynamic manifest, it specifies the anchor for the earliest availability
     * time. Format is a date string as specified in ISO 8601
     *
     * @param {string} value
     *        value of attribute as a string
     * @return {number}
     *         The date as seconds from unix epoch
     */
    availabilityStartTime: function availabilityStartTime(value) {
      return parseDate(value) / 1000;
    },

    /**
     * Specifies the smallest period between potential changes to the MPD. Format is a
     * duration string as specified in ISO 8601
     *
     * @param {string} value
     *        value of attribute as a string
     * @return {number}
     *         The duration in seconds
     */
    minimumUpdatePeriod: function minimumUpdatePeriod(value) {
      return parseDuration(value);
    },

    /**
     * Specifies the suggested presentation delay. Format is a
     * duration string as specified in ISO 8601
     *
     * @param {string} value
     *        value of attribute as a string
     * @return {number}
     *         The duration in seconds
     */
    suggestedPresentationDelay: function suggestedPresentationDelay(value) {
      return parseDuration(value);
    },

    /**
     * Specifies the type of mpd. Can be either "static" or "dynamic"
     *
     * @param {string} value
     *        value of attribute as a string
     *
     * @return {string}
     *         The type as a string
     */
    type: function type(value) {
      return value;
    },

    /**
     * Specifies the duration of the smallest time shifting buffer for any Representation
     * in the MPD. Format is a duration string as specified in ISO 8601
     *
     * @param {string} value
     *        value of attribute as a string
     * @return {number}
     *         The duration in seconds
     */
    timeShiftBufferDepth: function timeShiftBufferDepth(value) {
      return parseDuration(value);
    },

    /**
     * Specifies the PeriodStart time of the Period relative to the availabilityStartTime.
     * Format is a duration string as specified in ISO 8601
     *
     * @param {string} value
     *        value of attribute as a string
     * @return {number}
     *         The duration in seconds
     */
    start: function start(value) {
      return parseDuration(value);
    },

    /**
     * Specifies the width of the visual presentation
     *
     * @param {string} value
     *        value of attribute as a string
     * @return {number}
     *         The parsed width
     */
    width: function width(value) {
      return parseInt(value, 10);
    },

    /**
     * Specifies the height of the visual presentation
     *
     * @param {string} value
     *        value of attribute as a string
     * @return {number}
     *         The parsed height
     */
    height: function height(value) {
      return parseInt(value, 10);
    },

    /**
     * Specifies the bitrate of the representation
     *
     * @param {string} value
     *        value of attribute as a string
     * @return {number}
     *         The parsed bandwidth
     */
    bandwidth: function bandwidth(value) {
      return parseInt(value, 10);
    },

    /**
     * Specifies the number of the first Media Segment in this Representation in the Period
     *
     * @param {string} value
     *        value of attribute as a string
     * @return {number}
     *         The parsed number
     */
    startNumber: function startNumber(value) {
      return parseInt(value, 10);
    },

    /**
     * Specifies the timescale in units per second
     *
     * @param {string} value
     *        value of attribute as a string
     * @return {number}
     *         The parsed timescale
     */
    timescale: function timescale(value) {
      return parseInt(value, 10);
    },

    /**
     * Specifies the presentationTimeOffset.
     *
     * @param {string} value
     *        value of the attribute as a string
     *
     * @return {number}
     *         The parsed presentationTimeOffset
     */
    presentationTimeOffset: function presentationTimeOffset(value) {
      return parseInt(value, 10);
    },

    /**
     * Specifies the constant approximate Segment duration
     * NOTE: The <Period> element also contains an @duration attribute. This duration
     *       specifies the duration of the Period. This attribute is currently not
     *       supported by the rest of the parser, however we still check for it to prevent
     *       errors.
     *
     * @param {string} value
     *        value of attribute as a string
     * @return {number}
     *         The parsed duration
     */
    duration: function duration(value) {
      var parsedValue = parseInt(value, 10);

      // A Period@duration is an ISO 8601 duration string, not an integer;
      // fall back to duration-string parsing when parseInt fails.
      if (isNaN(parsedValue)) {
        return parseDuration(value);
      }

      return parsedValue;
    },

    /**
     * Specifies the Segment duration, in units of the value of the @timescale.
     *
     * @param {string} value
     *        value of attribute as a string
     * @return {number}
     *         The parsed duration
     */
    d: function d(value) {
      return parseInt(value, 10);
    },

    /**
     * Specifies the MPD start time, in @timescale units, the first Segment in the series
     * starts relative to the beginning of the Period
     *
     * @param {string} value
     *        value of attribute as a string
     * @return {number}
     *         The parsed time
     */
    t: function t(value) {
      return parseInt(value, 10);
    },

    /**
     * Specifies the repeat count of the number of following contiguous Segments with the
     * same duration expressed by the value of @d
     *
     * @param {string} value
     *        value of attribute as a string
     * @return {number}
     *         The parsed number
     */
    r: function r(value) {
      return parseInt(value, 10);
    },

    /**
     * Default parser for all other attributes. Acts as a no-op and just returns the value
     * as a string
     *
     * @param {string} value
     *        value of attribute as a string
     * @return {string}
     *         Unparsed value
     */
    DEFAULT: function DEFAULT(value) {
      return value;
    }
  };
7500 /**
7501 * Gets all the attributes and values of the provided node, parses attributes with known
7502 * types, and returns an object with attribute names mapped to values.
7503 *
7504 * @param {Node} el
7505 * The node to parse attributes from
7506 * @return {Object}
7507 * Object with all attributes of el parsed
7508 */
7509
7510 var parseAttributes = function parseAttributes(el) {
7511 if (!(el && el.attributes)) {
7512 return {};
7513 }
7514
7515 return from(el.attributes).reduce(function (a, e) {
7516 var parseFn = parsers[e.name] || parsers.DEFAULT;
7517 a[e.name] = parseFn(e.value);
7518 return a;
7519 }, {});
7520 };
7521
  // Maps DASH ContentProtection @schemeIdUri UUID URNs (lowercase) to the
  // corresponding EME key system identifier strings.
  var keySystemsMap = {
    'urn:uuid:1077efec-c0b2-4d02-ace3-3c1e52e2fb4b': 'org.w3.clearkey',
    'urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed': 'com.widevine.alpha',
    'urn:uuid:9a04f079-9840-4286-ab92-e65be0885f95': 'com.microsoft.playready',
    'urn:uuid:f239e769-efa3-4850-9c16-a903c6932efb': 'com.adobe.primetime'
  };
7528 /**
7529 * Builds a list of urls that is the product of the reference urls and BaseURL values
7530 *
7531 * @param {string[]} referenceUrls
7532 * List of reference urls to resolve to
7533 * @param {Node[]} baseUrlElements
7534 * List of BaseURL nodes from the mpd
7535 * @return {string[]}
7536 * List of resolved urls
7537 */
7538
7539 var buildBaseUrls = function buildBaseUrls(referenceUrls, baseUrlElements) {
7540 if (!baseUrlElements.length) {
7541 return referenceUrls;
7542 }
7543
7544 return flatten(referenceUrls.map(function (reference) {
7545 return baseUrlElements.map(function (baseUrlElement) {
7546 return resolveUrl$1(reference, getContent(baseUrlElement));
7547 });
7548 }));
7549 };
7550 /**
7551 * Contains all Segment information for its containing AdaptationSet
7552 *
7553 * @typedef {Object} SegmentInformation
7554 * @property {Object|undefined} template
7555 * Contains the attributes for the SegmentTemplate node
7556 * @property {Object[]|undefined} segmentTimeline
   *        Contains a list of attributes for each S node within the SegmentTimeline node
7558 * @property {Object|undefined} list
7559 * Contains the attributes for the SegmentList node
7560 * @property {Object|undefined} base
7561 * Contains the attributes for the SegmentBase node
7562 */
7563
7564 /**
7565 * Returns all available Segment information contained within the AdaptationSet node
7566 *
7567 * @param {Node} adaptationSet
7568 * The AdaptationSet node to get Segment information from
7569 * @return {SegmentInformation}
7570 * The Segment information contained within the provided AdaptationSet
7571 */
7572
7573
  var getSegmentInformation = function getSegmentInformation(adaptationSet) {
    var segmentTemplate = findChildren(adaptationSet, 'SegmentTemplate')[0];
    var segmentList = findChildren(adaptationSet, 'SegmentList')[0];
    // Each <SegmentURL> is tagged so later code can tell where it came from.
    var segmentUrls = segmentList && findChildren(segmentList, 'SegmentURL').map(function (s) {
      return merge({
        tag: 'SegmentURL'
      }, parseAttributes(s));
    });
    var segmentBase = findChildren(adaptationSet, 'SegmentBase')[0];
    // A SegmentTimeline may appear under either SegmentList or SegmentTemplate.
    var segmentTimelineParentNode = segmentList || segmentTemplate;
    var segmentTimeline = segmentTimelineParentNode && findChildren(segmentTimelineParentNode, 'SegmentTimeline')[0];
    // An <Initialization> node may appear under SegmentList, SegmentBase, or
    // SegmentTemplate.
    var segmentInitializationParentNode = segmentList || segmentBase || segmentTemplate;
    var segmentInitialization = segmentInitializationParentNode && findChildren(segmentInitializationParentNode, 'Initialization')[0]; // SegmentTemplate is handled slightly differently, since it can have both
    // @initialization and an <Initialization> node. @initialization can be templated,
    // while the node can have a url and range specified. If the <SegmentTemplate> has
    // both @initialization and an <Initialization> subelement we opt to override with
    // the node, as this interaction is not defined in the spec.

    var template = segmentTemplate && parseAttributes(segmentTemplate);

    if (template && segmentInitialization) {
      template.initialization = segmentInitialization && parseAttributes(segmentInitialization);
    } else if (template && template.initialization) {
      // If it is @initialization we convert it to an object since this is the format that
      // later functions will rely on for the initialization segment. This is only valid
      // for <SegmentTemplate>
      template.initialization = {
        sourceURL: template.initialization
      };
    }

    var segmentInfo = {
      template: template,
      segmentTimeline: segmentTimeline && findChildren(segmentTimeline, 'S').map(function (s) {
        return parseAttributes(s);
      }),
      list: segmentList && merge(parseAttributes(segmentList), {
        segmentUrls: segmentUrls,
        initialization: parseAttributes(segmentInitialization)
      }),
      base: segmentBase && merge(parseAttributes(segmentBase), {
        initialization: parseAttributes(segmentInitialization)
      })
    };
    // Drop the entries for segment types that are not present so callers can
    // rely on simple truthiness checks.
    Object.keys(segmentInfo).forEach(function (key) {
      if (!segmentInfo[key]) {
        delete segmentInfo[key];
      }
    });
    return segmentInfo;
  };
7625 /**
7626 * Contains Segment information and attributes needed to construct a Playlist object
7627 * from a Representation
7628 *
7629 * @typedef {Object} RepresentationInformation
7630 * @property {SegmentInformation} segmentInfo
7631 * Segment information for this Representation
7632 * @property {Object} attributes
7633 * Inherited attributes for this Representation
7634 */
7635
7636 /**
7637 * Maps a Representation node to an object containing Segment information and attributes
7638 *
7639 * @name inheritBaseUrlsCallback
7640 * @function
7641 * @param {Node} representation
7642 * Representation node from the mpd
7643 * @return {RepresentationInformation}
7644 * Representation information needed to construct a Playlist object
7645 */
7646
7647 /**
7648 * Returns a callback for Array.prototype.map for mapping Representation nodes to
7649 * Segment information and attributes using inherited BaseURL nodes.
7650 *
7651 * @param {Object} adaptationSetAttributes
7652 * Contains attributes inherited by the AdaptationSet
7653 * @param {string[]} adaptationSetBaseUrls
7654 * Contains list of resolved base urls inherited by the AdaptationSet
7655 * @param {SegmentInformation} adaptationSetSegmentInfo
7656 * Contains Segment information for the AdaptationSet
7657 * @return {inheritBaseUrlsCallback}
7658 * Callback map function
7659 */
7660
7661
7662 var inheritBaseUrls = function inheritBaseUrls(adaptationSetAttributes, adaptationSetBaseUrls, adaptationSetSegmentInfo) {
7663 return function (representation) {
7664 var repBaseUrlElements = findChildren(representation, 'BaseURL');
7665 var repBaseUrls = buildBaseUrls(adaptationSetBaseUrls, repBaseUrlElements);
7666 var attributes = merge(adaptationSetAttributes, parseAttributes(representation));
7667 var representationSegmentInfo = getSegmentInformation(representation);
7668 return repBaseUrls.map(function (baseUrl) {
7669 return {
7670 segmentInfo: merge(adaptationSetSegmentInfo, representationSegmentInfo),
7671 attributes: merge(attributes, {
7672 baseUrl: baseUrl
7673 })
7674 };
7675 });
7676 };
7677 };
7678 /**
   * Transforms a series of content protection nodes to
7680 * an object containing pssh data by key system
7681 *
7682 * @param {Node[]} contentProtectionNodes
7683 * Content protection nodes
7684 * @return {Object}
7685 * Object containing pssh data by key system
7686 */
7687
7688
  var generateKeySystemInformation = function generateKeySystemInformation(contentProtectionNodes) {
    return contentProtectionNodes.reduce(function (acc, node) {
      var attributes = parseAttributes(node); // Although it could be argued that according to the UUID RFC spec the UUID string (a-f chars) should be generated
      // as a lowercase string it also mentions it should be treated as case-insensitive on input. Since the key system
      // UUIDs in the keySystemsMap are hardcoded as lowercase in the codebase there isn't any reason not to do
      // .toLowerCase() on the input UUID string from the manifest (at least I could not think of one).

      if (attributes.schemeIdUri) {
        attributes.schemeIdUri = attributes.schemeIdUri.toLowerCase();
      }

      // Only scheme ids present in keySystemsMap are recognized; any other
      // ContentProtection node is ignored.
      var keySystem = keySystemsMap[attributes.schemeIdUri];

      if (keySystem) {
        acc[keySystem] = {
          attributes: attributes
        };
        // <cenc:pssh> carries the base64 encoded contents of a pssh box.
        var psshNode = findChildren(node, 'cenc:pssh')[0];

        if (psshNode) {
          var pssh = getContent(psshNode);
          acc[keySystem].pssh = pssh && decodeB64ToUint8Array(pssh);
        }
      }

      return acc;
    }, {});
  }; // defined in ANSI_SCTE 214-1 2016
7717
7718
7719 var parseCaptionServiceMetadata = function parseCaptionServiceMetadata(service) {
7720 // 608 captions
7721 if (service.schemeIdUri === 'urn:scte:dash:cc:cea-608:2015') {
7722 var values = typeof service.value !== 'string' ? [] : service.value.split(';');
7723 return values.map(function (value) {
7724 var channel;
7725 var language; // default language to value
7726
7727 language = value;
7728
7729 if (/^CC\d=/.test(value)) {
7730 var _value$split = value.split('=');
7731
7732 channel = _value$split[0];
7733 language = _value$split[1];
7734 } else if (/^CC\d$/.test(value)) {
7735 channel = value;
7736 }
7737
7738 return {
7739 channel: channel,
7740 language: language
7741 };
7742 });
7743 } else if (service.schemeIdUri === 'urn:scte:dash:cc:cea-708:2015') {
7744 var _values = typeof service.value !== 'string' ? [] : service.value.split(';');
7745
7746 return _values.map(function (value) {
7747 var flags = {
7748 // service or channel number 1-63
7749 'channel': undefined,
7750 // language is a 3ALPHA per ISO 639.2/B
7751 // field is required
7752 'language': undefined,
7753 // BIT 1/0 or ?
7754 // default value is 1, meaning 16:9 aspect ratio, 0 is 4:3, ? is unknown
7755 'aspectRatio': 1,
7756 // BIT 1/0
7757 // easy reader flag indicated the text is tailed to the needs of beginning readers
7758 // default 0, or off
7759 'easyReader': 0,
7760 // BIT 1/0
7761 // If 3d metadata is present (CEA-708.1) then 1
7762 // default 0
7763 '3D': 0
7764 };
7765
7766 if (/=/.test(value)) {
7767 var _value$split2 = value.split('='),
7768 channel = _value$split2[0],
7769 _value$split2$ = _value$split2[1],
7770 opts = _value$split2$ === void 0 ? '' : _value$split2$;
7771
7772 flags.channel = channel;
7773 flags.language = value;
7774 opts.split(',').forEach(function (opt) {
7775 var _opt$split = opt.split(':'),
7776 name = _opt$split[0],
7777 val = _opt$split[1];
7778
7779 if (name === 'lang') {
7780 flags.language = val; // er for easyReadery
7781 } else if (name === 'er') {
7782 flags.easyReader = Number(val); // war for wide aspect ratio
7783 } else if (name === 'war') {
7784 flags.aspectRatio = Number(val);
7785 } else if (name === '3D') {
7786 flags['3D'] = Number(val);
7787 }
7788 });
7789 } else {
7790 flags.language = value;
7791 }
7792
7793 if (flags.channel) {
7794 flags.channel = 'SERVICE' + flags.channel;
7795 }
7796
7797 return flags;
7798 });
7799 }
7800 };
7801 /**
7802 * Maps an AdaptationSet node to a list of Representation information objects
7803 *
7804 * @name toRepresentationsCallback
7805 * @function
7806 * @param {Node} adaptationSet
7807 * AdaptationSet node from the mpd
7808 * @return {RepresentationInformation[]}
   * List of objects containing Representation information
7810 */
7811
7812 /**
7813 * Returns a callback for Array.prototype.map for mapping AdaptationSet nodes to a list of
7814 * Representation information objects
7815 *
7816 * @param {Object} periodAttributes
7817 * Contains attributes inherited by the Period
7818 * @param {string[]} periodBaseUrls
7819 * Contains list of resolved base urls inherited by the Period
7820 * @param {string[]} periodSegmentInfo
7821 * Contains Segment Information at the period level
7822 * @return {toRepresentationsCallback}
7823 * Callback map function
7824 */
7825
7826
7827 var toRepresentations = function toRepresentations(periodAttributes, periodBaseUrls, periodSegmentInfo) {
7828 return function (adaptationSet) {
7829 var adaptationSetAttributes = parseAttributes(adaptationSet);
7830 var adaptationSetBaseUrls = buildBaseUrls(periodBaseUrls, findChildren(adaptationSet, 'BaseURL'));
7831 var role = findChildren(adaptationSet, 'Role')[0];
7832 var roleAttributes = {
7833 role: parseAttributes(role)
7834 };
7835 var attrs = merge(periodAttributes, adaptationSetAttributes, roleAttributes);
7836 var accessibility = findChildren(adaptationSet, 'Accessibility')[0];
7837 var captionServices = parseCaptionServiceMetadata(parseAttributes(accessibility));
7838
7839 if (captionServices) {
7840 attrs = merge(attrs, {
7841 captionServices: captionServices
7842 });
7843 }
7844
7845 var label = findChildren(adaptationSet, 'Label')[0];
7846
7847 if (label && label.childNodes.length) {
7848 var labelVal = label.childNodes[0].nodeValue.trim();
7849 attrs = merge(attrs, {
7850 label: labelVal
7851 });
7852 }
7853
7854 var contentProtection = generateKeySystemInformation(findChildren(adaptationSet, 'ContentProtection'));
7855
7856 if (Object.keys(contentProtection).length) {
7857 attrs = merge(attrs, {
7858 contentProtection: contentProtection
7859 });
7860 }
7861
7862 var segmentInfo = getSegmentInformation(adaptationSet);
7863 var representations = findChildren(adaptationSet, 'Representation');
7864 var adaptationSetSegmentInfo = merge(periodSegmentInfo, segmentInfo);
7865 return flatten(representations.map(inheritBaseUrls(attrs, adaptationSetBaseUrls, adaptationSetSegmentInfo)));
7866 };
7867 };
7868 /**
7869 * Contains all period information for mapping nodes onto adaptation sets.
7870 *
7871 * @typedef {Object} PeriodInformation
7872 * @property {Node} period.node
7873 * Period node from the mpd
7874 * @property {Object} period.attributes
7875 * Parsed period attributes from node plus any added
7876 */
7877
7878 /**
7879 * Maps a PeriodInformation object to a list of Representation information objects for all
7880 * AdaptationSet nodes contained within the Period.
7881 *
7882 * @name toAdaptationSetsCallback
7883 * @function
7884 * @param {PeriodInformation} period
7885 * Period object containing necessary period information
7886 * @param {number} periodStart
7887 * Start time of the Period within the mpd
7888 * @return {RepresentationInformation[]}
   * List of objects containing Representation information
7890 */
7891
7892 /**
7893 * Returns a callback for Array.prototype.map for mapping Period nodes to a list of
7894 * Representation information objects
7895 *
7896 * @param {Object} mpdAttributes
7897 * Contains attributes inherited by the mpd
7898 * @param {string[]} mpdBaseUrls
7899 * Contains list of resolved base urls inherited by the mpd
7900 * @return {toAdaptationSetsCallback}
7901 * Callback map function
7902 */
7903
7904
7905 var toAdaptationSets = function toAdaptationSets(mpdAttributes, mpdBaseUrls) {
7906 return function (period, index) {
7907 var periodBaseUrls = buildBaseUrls(mpdBaseUrls, findChildren(period.node, 'BaseURL'));
7908 var periodAttributes = merge(mpdAttributes, {
7909 periodStart: period.attributes.start
7910 });
7911
7912 if (typeof period.attributes.duration === 'number') {
7913 periodAttributes.periodDuration = period.attributes.duration;
7914 }
7915
7916 var adaptationSets = findChildren(period.node, 'AdaptationSet');
7917 var periodSegmentInfo = getSegmentInformation(period.node);
7918 return flatten(adaptationSets.map(toRepresentations(periodAttributes, periodBaseUrls, periodSegmentInfo)));
7919 };
7920 };
7921 /**
7922 * Gets Period@start property for a given period.
7923 *
7924 * @param {Object} options
7925 * Options object
7926 * @param {Object} options.attributes
7927 * Period attributes
7928 * @param {Object} [options.priorPeriodAttributes]
7929 * Prior period attributes (if prior period is available)
7930 * @param {string} options.mpdType
7931 * The MPD@type these periods came from
7932 * @return {number|null}
7933 * The period start, or null if it's an early available period or error
7934 */
7935
7936
7937 var getPeriodStart = function getPeriodStart(_ref) {
7938 var attributes = _ref.attributes,
7939 priorPeriodAttributes = _ref.priorPeriodAttributes,
7940 mpdType = _ref.mpdType; // Summary of period start time calculation from DASH spec section 5.3.2.1
7941 //
7942 // A period's start is the first period's start + time elapsed after playing all
7943 // prior periods to this one. Periods continue one after the other in time (without
7944 // gaps) until the end of the presentation.
7945 //
7946 // The value of Period@start should be:
7947 // 1. if Period@start is present: value of Period@start
7948 // 2. if previous period exists and it has @duration: previous Period@start +
7949 // previous Period@duration
7950 // 3. if this is first period and MPD@type is 'static': 0
7951 // 4. in all other cases, consider the period an "early available period" (note: not
7952 // currently supported)
7953 // (1)
7954
7955 if (typeof attributes.start === 'number') {
7956 return attributes.start;
7957 } // (2)
7958
7959
7960 if (priorPeriodAttributes && typeof priorPeriodAttributes.start === 'number' && typeof priorPeriodAttributes.duration === 'number') {
7961 return priorPeriodAttributes.start + priorPeriodAttributes.duration;
7962 } // (3)
7963
7964
7965 if (!priorPeriodAttributes && mpdType === 'static') {
7966 return 0;
7967 } // (4)
7968 // There is currently no logic for calculating the Period@start value if there is
7969 // no Period@start or prior Period@start and Period@duration available. This is not made
7970 // explicit by the DASH interop guidelines or the DASH spec, however, since there's
7971 // nothing about any other resolution strategies, it's implied. Thus, this case should
7972 // be considered an early available period, or error, and null should suffice for both
7973 // of those cases.
7974
7975
7976 return null;
7977 };
7978 /**
7979 * Traverses the mpd xml tree to generate a list of Representation information objects
7980 * that have inherited attributes from parent nodes
7981 *
7982 * @param {Node} mpd
7983 * The root node of the mpd
7984 * @param {Object} options
7985 * Available options for inheritAttributes
7986 * @param {string} options.manifestUri
7987 * The uri source of the mpd
7988 * @param {number} options.NOW
7989 * Current time per DASH IOP. Default is current time in ms since epoch
7990 * @param {number} options.clientOffset
7991 * Client time difference from NOW (in milliseconds)
7992 * @return {RepresentationInformation[]}
7993 * List of objects containing Representation information
7994 */
7995
7996
  // Walks the MPD tree and produces a flat list of Representation information
  // objects, with attributes, base URLs and segment information inherited from
  // the MPD, Period and AdaptationSet levels. Throws when the manifest has no
  // Period nodes. Returns { locations, representationInfo }; `locations` is
  // undefined when the MPD has no Location children.
  var inheritAttributes = function inheritAttributes(mpd, options) {
    if (options === void 0) {
      options = {};
    }

    var _options = options,
        _options$manifestUri = _options.manifestUri,
        manifestUri = _options$manifestUri === void 0 ? '' : _options$manifestUri,
        _options$NOW = _options.NOW,
        NOW = _options$NOW === void 0 ? Date.now() : _options$NOW,
        _options$clientOffset = _options.clientOffset,
        clientOffset = _options$clientOffset === void 0 ? 0 : _options$clientOffset;
    var periodNodes = findChildren(mpd, 'Period');

    if (!periodNodes.length) {
      throw new Error(errors.INVALID_NUMBER_OF_PERIOD);
    }

    var locations = findChildren(mpd, 'Location');
    var mpdAttributes = parseAttributes(mpd);
    var mpdBaseUrls = buildBaseUrls([manifestUri], findChildren(mpd, 'BaseURL')); // See DASH spec section 5.3.1.2, Semantics of MPD element. Default type to 'static'.

    mpdAttributes.type = mpdAttributes.type || 'static';
    // NOTE(review): mediaPresentationDuration is presumably absent for dynamic
    // manifests, so sourceDuration falls back to 0 in that case — confirm.
    mpdAttributes.sourceDuration = mpdAttributes.mediaPresentationDuration || 0;
    mpdAttributes.NOW = NOW;
    mpdAttributes.clientOffset = clientOffset;

    if (locations.length) {
      mpdAttributes.locations = locations.map(getContent);
    }

    var periods = []; // Since toAdaptationSets acts on individual periods right now, the simplest approach to
    // adding properties that require looking at prior periods is to parse attributes and add
    // missing ones before toAdaptationSets is called. If more such properties are added, it
    // may be better to refactor toAdaptationSets.

    periodNodes.forEach(function (node, index) {
      var attributes = parseAttributes(node); // Use the last modified prior period, as it may contain added information necessary
      // for this period.

      var priorPeriod = periods[index - 1];
      // Resolve Period@start here so later periods can inherit timing from the
      // already-processed prior period (may resolve to null for early
      // available periods).
      attributes.start = getPeriodStart({
        attributes: attributes,
        priorPeriodAttributes: priorPeriod ? priorPeriod.attributes : null,
        mpdType: mpdAttributes.type
      });
      periods.push({
        node: node,
        attributes: attributes
      });
    });
    return {
      locations: mpdAttributes.locations,
      representationInfo: flatten(periods.map(toAdaptationSets(mpdAttributes, mpdBaseUrls)))
    };
  };
8053
8054 var stringToMpdXml = function stringToMpdXml(manifestString) {
8055 if (manifestString === '') {
8056 throw new Error(errors.DASH_EMPTY_MANIFEST);
8057 }
8058
8059 var parser = new xmldom.DOMParser();
8060 var xml;
8061 var mpd;
8062
8063 try {
8064 xml = parser.parseFromString(manifestString, 'application/xml');
8065 mpd = xml && xml.documentElement.tagName === 'MPD' ? xml.documentElement : null;
8066 } catch (e) {// ie 11 throwsw on invalid xml
8067 }
8068
8069 if (!mpd || mpd && mpd.getElementsByTagName('parsererror').length > 0) {
8070 throw new Error(errors.DASH_INVALID_XML);
8071 }
8072
8073 return mpd;
8074 };
8075 /**
8076 * Parses the manifest for a UTCTiming node, returning the nodes attributes if found
8077 *
8078 * @param {string} mpd
8079 * XML string of the MPD manifest
8080 * @return {Object|null}
8081 * Attributes of UTCTiming node specified in the manifest. Null if none found
8082 */
8083
8084
8085 var parseUTCTimingScheme = function parseUTCTimingScheme(mpd) {
8086 var UTCTimingNode = findChildren(mpd, 'UTCTiming')[0];
8087
8088 if (!UTCTimingNode) {
8089 return null;
8090 }
8091
8092 var attributes = parseAttributes(UTCTimingNode);
8093
8094 switch (attributes.schemeIdUri) {
8095 case 'urn:mpeg:dash:utc:http-head:2014':
8096 case 'urn:mpeg:dash:utc:http-head:2012':
8097 attributes.method = 'HEAD';
8098 break;
8099
8100 case 'urn:mpeg:dash:utc:http-xsdate:2014':
8101 case 'urn:mpeg:dash:utc:http-iso:2014':
8102 case 'urn:mpeg:dash:utc:http-xsdate:2012':
8103 case 'urn:mpeg:dash:utc:http-iso:2012':
8104 attributes.method = 'GET';
8105 break;
8106
8107 case 'urn:mpeg:dash:utc:direct:2014':
8108 case 'urn:mpeg:dash:utc:direct:2012':
8109 attributes.method = 'DIRECT';
8110 attributes.value = Date.parse(attributes.value);
8111 break;
8112
8113 case 'urn:mpeg:dash:utc:http-ntp:2014':
8114 case 'urn:mpeg:dash:utc:ntp:2014':
8115 case 'urn:mpeg:dash:utc:sntp:2014':
8116 default:
8117 throw new Error(errors.UNSUPPORTED_UTC_TIMING_SCHEME);
8118 }
8119
8120 return attributes;
8121 };
8122 /*
8123 * Given a DASH manifest string and options, parses the DASH manifest into an object in the
8124 * form outputed by m3u8-parser and accepted by videojs/http-streaming.
8125 *
8126 * For live DASH manifests, if `previousManifest` is provided in options, then the newly
8127 * parsed DASH manifest will have its media sequence and discontinuity sequence values
8128 * updated to reflect its position relative to the prior manifest.
8129 *
8130 * @param {string} manifestString - the DASH manifest as a string
8131 * @param {options} [options] - any options
8132 *
8133 * @return {Object} the manifest object
8134 */
8135
8136 var parse = function parse(manifestString, options) {
8137 if (options === void 0) {
8138 options = {};
8139 }
8140
8141 var parsedManifestInfo = inheritAttributes(stringToMpdXml(manifestString), options);
8142 var playlists = toPlaylists(parsedManifestInfo.representationInfo);
8143 return toM3u8({
8144 dashPlaylists: playlists,
8145 locations: parsedManifestInfo.locations,
8146 sidxMapping: options.sidxMapping,
8147 previousManifest: options.previousManifest
8148 });
8149 };
8150 /**
8151 * Parses the manifest for a UTCTiming node, returning the nodes attributes if found
8152 *
8153 * @param {string} manifestString
8154 * XML string of the MPD manifest
8155 * @return {Object|null}
8156 * Attributes of UTCTiming node specified in the manifest. Null if none found
8157 */
8158
8159
8160 var parseUTCTiming = function parseUTCTiming(manifestString) {
8161 return parseUTCTimingScheme(stringToMpdXml(manifestString));
8162 };
8163
8164 var MAX_UINT32 = Math.pow(2, 32);
8165
8166 var getUint64$1 = function getUint64(uint8) {
8167 var dv = new DataView(uint8.buffer, uint8.byteOffset, uint8.byteLength);
8168 var value;
8169
8170 if (dv.getBigUint64) {
8171 value = dv.getBigUint64(0);
8172
8173 if (value < Number.MAX_SAFE_INTEGER) {
8174 return Number(value);
8175 }
8176
8177 return value;
8178 }
8179
8180 return dv.getUint32(0) * MAX_UINT32 + dv.getUint32(4);
8181 };
8182
8183 var numbers = {
8184 getUint64: getUint64$1,
8185 MAX_UINT32: MAX_UINT32
8186 };
8187
8188 var getUint64 = numbers.getUint64;
8189
8190 var parseSidx = function parseSidx(data) {
8191 var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
8192 result = {
8193 version: data[0],
8194 flags: new Uint8Array(data.subarray(1, 4)),
8195 references: [],
8196 referenceId: view.getUint32(4),
8197 timescale: view.getUint32(8)
8198 },
8199 i = 12;
8200
8201 if (result.version === 0) {
8202 result.earliestPresentationTime = view.getUint32(i);
8203 result.firstOffset = view.getUint32(i + 4);
8204 i += 8;
8205 } else {
8206 // read 64 bits
8207 result.earliestPresentationTime = getUint64(data.subarray(i));
8208 result.firstOffset = getUint64(data.subarray(i + 8));
8209 i += 16;
8210 }
8211
8212 i += 2; // reserved
8213
8214 var referenceCount = view.getUint16(i);
8215 i += 2; // start of references
8216
8217 for (; referenceCount > 0; i += 12, referenceCount--) {
8218 result.references.push({
8219 referenceType: (data[i] & 0x80) >>> 7,
8220 referencedSize: view.getUint32(i) & 0x7FFFFFFF,
8221 subsegmentDuration: view.getUint32(i + 4),
8222 startsWithSap: !!(data[i + 8] & 0x80),
8223 sapType: (data[i + 8] & 0x70) >>> 4,
8224 sapDeltaTime: view.getUint32(i + 8) & 0x0FFFFFFF
8225 });
8226 }
8227
8228 return result;
8229 };
8230
8231 var parseSidx_1 = parseSidx;
8232
  // "ID3" tag identifier bytes.
  var ID3 = toUint8([0x49, 0x44, 0x33]);

  // Total size in bytes of the ID3 tag starting at `offset`: the 28-bit
  // "synchsafe" payload size (4 bytes, 7 bits each) plus the 10-byte header,
  // plus a 10-byte footer when the footer flag (bit 4 of the flags byte) is
  // set.
  var getId3Size = function getId3Size(bytes, offset) {
    if (offset === void 0) {
      offset = 0;
    }

    bytes = toUint8(bytes);
    var flags = bytes[offset + 5];
    // synchsafe integer: each byte contributes 7 bits
    var returnSize = bytes[offset + 6] << 21 | bytes[offset + 7] << 14 | bytes[offset + 8] << 7 | bytes[offset + 9];
    var footerPresent = (flags & 16) >> 4;

    if (footerPresent) {
      return returnSize + 20;
    }

    return returnSize + 10;
  };

  // Returns the offset of the first byte past any ID3 tag(s) that start at
  // `offset`; returns `offset` unchanged when no ID3 tag is present or fewer
  // than 10 bytes (one full ID3 header) remain.
  var getId3Offset = function getId3Offset(bytes, offset) {
    if (offset === void 0) {
      offset = 0;
    }

    bytes = toUint8(bytes);

    if (bytes.length - offset < 10 || !bytesMatch(bytes, ID3, {
      offset: offset
    })) {
      return offset;
    }

    offset += getId3Size(bytes, offset); // recursive check for id3 tags as some files
    // have multiple ID3 tag sections even though
    // they should not.

    return getId3Offset(bytes, offset);
  };
8269
8270 var normalizePath$1 = function normalizePath(path) {
8271 if (typeof path === 'string') {
8272 return stringToBytes(path);
8273 }
8274
8275 if (typeof path === 'number') {
8276 return path;
8277 }
8278
8279 return path;
8280 };
8281
8282 var normalizePaths$1 = function normalizePaths(paths) {
8283 if (!Array.isArray(paths)) {
8284 return [normalizePath$1(paths)];
8285 }
8286
8287 return paths.map(function (p) {
8288 return normalizePath$1(p);
8289 });
8290 };
8291 /**
8292 * find any number of boxes by name given a path to it in an iso bmff
8293 * such as mp4.
8294 *
8295 * @param {TypedArray} bytes
8296 * bytes for the iso bmff to search for boxes in
8297 *
8298 * @param {Uint8Array[]|string[]|string|Uint8Array} name
8299 * An array of paths or a single path representing the name
8300 * of boxes to search through in bytes. Paths may be
8301 * uint8 (character codes) or strings.
8302 *
8303 * @param {boolean} [complete=false]
8304 * Should we search only for complete boxes on the final path.
8305 * This is very useful when you do not want to get back partial boxes
8306 * in the case of streaming files.
8307 *
8308 * @return {Uint8Array[]}
8309 * An array of the end paths that we found.
8310 */
8311
8312 var findBox = function findBox(bytes, paths, complete) {
8313 if (complete === void 0) {
8314 complete = false;
8315 }
8316
8317 paths = normalizePaths$1(paths);
8318 bytes = toUint8(bytes);
8319 var results = [];
8320
8321 if (!paths.length) {
8322 // short-circuit the search for empty paths
8323 return results;
8324 }
8325
8326 var i = 0;
8327
8328 while (i < bytes.length) {
8329 var size = (bytes[i] << 24 | bytes[i + 1] << 16 | bytes[i + 2] << 8 | bytes[i + 3]) >>> 0;
8330 var type = bytes.subarray(i + 4, i + 8); // invalid box format.
8331
8332 if (size === 0) {
8333 break;
8334 }
8335
8336 var end = i + size;
8337
8338 if (end > bytes.length) {
8339 // this box is bigger than the number of bytes we have
8340 // and complete is set, we cannot find any more boxes.
8341 if (complete) {
8342 break;
8343 }
8344
8345 end = bytes.length;
8346 }
8347
8348 var data = bytes.subarray(i + 8, end);
8349
8350 if (bytesMatch(type, paths[0])) {
8351 if (paths.length === 1) {
8352 // this is the end of the path and we've found the box we were
8353 // looking for
8354 results.push(data);
8355 } else {
8356 // recursively search for the next box along the path
8357 results.push.apply(results, findBox(data, paths.slice(1), complete));
8358 }
8359 }
8360
8361 i = end;
8362 } // we've finished searching all of bytes
8363
8364
8365 return results;
8366 };
8367
8368 // https://matroska-org.github.io/libebml/specs.html
8369 // https://www.matroska.org/technical/elements.html
8370 // https://www.webmproject.org/docs/container/
8371
  // EBML element ids (as byte sequences) for the matroska/webm tags read by
  // the ebml helpers below. See https://www.matroska.org/technical/elements.html
  var EBML_TAGS = {
    EBML: toUint8([0x1A, 0x45, 0xDF, 0xA3]),
    DocType: toUint8([0x42, 0x82]),
    Segment: toUint8([0x18, 0x53, 0x80, 0x67]),
    SegmentInfo: toUint8([0x15, 0x49, 0xA9, 0x66]),
    Tracks: toUint8([0x16, 0x54, 0xAE, 0x6B]),
    // NOTE(review): Track and TrackEntry share the same id (0xAE); the spec
    // names this element TrackEntry — confirm both aliases are intentional.
    Track: toUint8([0xAE]),
    TrackNumber: toUint8([0xd7]),
    DefaultDuration: toUint8([0x23, 0xe3, 0x83]),
    TrackEntry: toUint8([0xAE]),
    TrackType: toUint8([0x83]),
    FlagDefault: toUint8([0x88]),
    CodecID: toUint8([0x86]),
    CodecPrivate: toUint8([0x63, 0xA2]),
    VideoTrack: toUint8([0xe0]),
    AudioTrack: toUint8([0xe1]),
    // Not used yet, but will be used for live webm/mkv
    // see https://www.matroska.org/technical/basics.html#block-structure
    // see https://www.matroska.org/technical/basics.html#simpleblock-structure
    Cluster: toUint8([0x1F, 0x43, 0xB6, 0x75]),
    Timestamp: toUint8([0xE7]),
    TimestampScale: toUint8([0x2A, 0xD7, 0xB1]),
    BlockGroup: toUint8([0xA0]),
    BlockDuration: toUint8([0x9B]),
    Block: toUint8([0xA1]),
    SimpleBlock: toUint8([0xA3])
  };
8399 /**
8400 * This is a simple table to determine the length
8401 * of things in ebml. The length is one based (starts at 1,
8402 * rather than zero) and for every zero bit before a one bit
8403 * we add one to length. We also need this table because in some
8404 * case we have to xor all the length bits from another value.
8405 */
8406
8407 var LENGTH_TABLE = [128, 64, 32, 16, 8, 4, 2, 1];
8408
8409 var getLength = function getLength(byte) {
8410 var len = 1;
8411
8412 for (var i = 0; i < LENGTH_TABLE.length; i++) {
8413 if (byte & LENGTH_TABLE[i]) {
8414 break;
8415 }
8416
8417 len++;
8418 }
8419
8420 return len;
8421 }; // length in ebml is stored in the first 4 to 8 bits
8422 // of the first byte. 4 for the id length and 8 for the
8423 // data size length. Length is measured by converting the number to binary
8424 // then 1 + the number of zeros before a 1 is encountered starting
8425 // from the left.
8426
8427
8428 var getvint = function getvint(bytes, offset, removeLength, signed) {
8429 if (removeLength === void 0) {
8430 removeLength = true;
8431 }
8432
8433 if (signed === void 0) {
8434 signed = false;
8435 }
8436
8437 var length = getLength(bytes[offset]);
8438 var valueBytes = bytes.subarray(offset, offset + length); // NOTE that we do **not** subarray here because we need to copy these bytes
8439 // as they will be modified below to remove the dataSizeLen bits and we do not
8440 // want to modify the original data. normally we could just call slice on
8441 // uint8array but ie 11 does not support that...
8442
8443 if (removeLength) {
8444 valueBytes = Array.prototype.slice.call(bytes, offset, offset + length);
8445 valueBytes[0] ^= LENGTH_TABLE[length - 1];
8446 }
8447
8448 return {
8449 length: length,
8450 value: bytesToNumber(valueBytes, {
8451 signed: signed
8452 }),
8453 bytes: valueBytes
8454 };
8455 };
8456
8457 var normalizePath = function normalizePath(path) {
8458 if (typeof path === 'string') {
8459 return path.match(/.{1,2}/g).map(function (p) {
8460 return normalizePath(p);
8461 });
8462 }
8463
8464 if (typeof path === 'number') {
8465 return numberToBytes(path);
8466 }
8467
8468 return path;
8469 };
8470
8471 var normalizePaths = function normalizePaths(paths) {
8472 if (!Array.isArray(paths)) {
8473 return [normalizePath(paths)];
8474 }
8475
8476 return paths.map(function (p) {
8477 return normalizePath(p);
8478 });
8479 };
8480
  // Determines the effective end of an ebml element whose declared data size
  // is "unknown": walks sibling tags starting at `offset` until either the
  // end of `bytes` is reached (returns bytes.length) or another tag with the
  // same id as `id` is found (returns that tag's offset).
  var getInfinityDataSize = function getInfinityDataSize(id, bytes, offset) {
    if (offset >= bytes.length) {
      return bytes.length;
    }

    var innerid = getvint(bytes, offset, false);

    if (bytesMatch(id.bytes, innerid.bytes)) {
      return offset;
    }

    // skip past this sibling: id bytes + size header bytes + data bytes
    var dataHeader = getvint(bytes, offset + innerid.length);
    return getInfinityDataSize(id, bytes, offset + dataHeader.length + dataHeader.value + innerid.length);
  };
8495 /**
8496 * Notes on the EBLM format.
8497 *
8498 * EBLM uses "vints" tags. Every vint tag contains
8499 * two parts
8500 *
8501 * 1. The length from the first byte. You get this by
8502 * converting the byte to binary and counting the zeros
8503 * before a 1. Then you add 1 to that. Examples
8504 * 00011111 = length 4 because there are 3 zeros before a 1.
8505 * 00100000 = length 3 because there are 2 zeros before a 1.
8506 * 00000011 = length 7 because there are 6 zeros before a 1.
8507 *
8508 * 2. The bits used for length are removed from the first byte
8509 * Then all the bytes are merged into a value. NOTE: this
8510 * is not the case for id ebml tags as there id includes
8511 * length bits.
8512 *
8513 */
8514
8515
  // Finds all ebml elements matching the given id path (analogous to findBox
  // for iso-bmff) and returns their data payloads.
  var findEbml = function findEbml(bytes, paths) {
    paths = normalizePaths(paths);
    bytes = toUint8(bytes);
    var results = [];

    if (!paths.length) {
      return results;
    }

    var i = 0;

    while (i < bytes.length) {
      // element id vint (length bits kept), then the data-size vint
      var id = getvint(bytes, i, false);
      var dataHeader = getvint(bytes, i + id.length);
      var dataStart = i + id.length + dataHeader.length; // dataSize is unknown or this is a live stream

      // NOTE(review): only the one-byte unknown-size marker (0xFF, value 0x7f
      // after the length bit is removed) is detected here; longer all-ones
      // size vints would not take this path — confirm this matches intent.
      if (dataHeader.value === 0x7f) {
        dataHeader.value = getInfinityDataSize(id, bytes, dataStart);

        if (dataHeader.value !== bytes.length) {
          dataHeader.value -= dataStart;
        }
      }

      // clamp the data range to the bytes we actually have
      var dataEnd = dataStart + dataHeader.value > bytes.length ? bytes.length : dataStart + dataHeader.value;
      var data = bytes.subarray(dataStart, dataEnd);

      if (bytesMatch(paths[0], id.bytes)) {
        if (paths.length === 1) {
          // this is the end of the paths and we've found the tag we were
          // looking for
          results.push(data);
        } else {
          // recursively search for the next tag inside of the data
          // of this one
          results = results.concat(findEbml(data, paths.slice(1)));
        }
      }

      var totalLength = id.length + dataHeader.length + data.length; // move past this tag entirely, we are not looking for it

      i += totalLength;
    }

    return results;
  }; // see https://www.matroska.org/technical/basics.html#block-structure
8562
8563 var NAL_TYPE_ONE = toUint8([0x00, 0x00, 0x00, 0x01]);
8564 var NAL_TYPE_TWO = toUint8([0x00, 0x00, 0x01]);
8565 var EMULATION_PREVENTION = toUint8([0x00, 0x00, 0x03]);
8566 /**
8567 * Expunge any "Emulation Prevention" bytes from a "Raw Byte
8568 * Sequence Payload"
8569 *
8570 * @param data {Uint8Array} the bytes of a RBSP from a NAL
8571 * unit
8572 * @return {Uint8Array} the RBSP without any Emulation
8573 * Prevention Bytes
8574 */
8575
8576 var discardEmulationPreventionBytes = function discardEmulationPreventionBytes(bytes) {
8577 var positions = [];
8578 var i = 1; // Find all `Emulation Prevention Bytes`
8579
8580 while (i < bytes.length - 2) {
8581 if (bytesMatch(bytes.subarray(i, i + 3), EMULATION_PREVENTION)) {
8582 positions.push(i + 2);
8583 i++;
8584 }
8585
8586 i++;
8587 } // If no Emulation Prevention Bytes were found just return the original
8588 // array
8589
8590
8591 if (positions.length === 0) {
8592 return bytes;
8593 } // Create a new array to hold the NAL unit data
8594
8595
8596 var newLength = bytes.length - positions.length;
8597 var newData = new Uint8Array(newLength);
8598 var sourceIndex = 0;
8599
8600 for (i = 0; i < newLength; sourceIndex++, i++) {
8601 if (sourceIndex === positions[0]) {
8602 // Skip this byte
8603 sourceIndex++; // Remove this position index
8604
8605 positions.shift();
8606 }
8607
8608 newData[i] = bytes[sourceIndex];
8609 }
8610
8611 return newData;
8612 };
  // Scans an annex-b byte stream for the first NAL unit whose type is in
  // `types`, returning its payload (with emulation prevention bytes removed)
  // once the following start code is found. Returns an empty subarray when no
  // matching NAL is found.
  var findNal = function findNal(bytes, dataType, types, nalLimit) {
    if (nalLimit === void 0) {
      nalLimit = Infinity;
    }

    bytes = toUint8(bytes);
    types = [].concat(types);
    var i = 0;
    var nalStart;
    var nalsFound = 0; // keep searching until:
    // we reach the end of bytes
    // we reach the maximum number of nals they want to search
    // NOTE: that we disregard nalLimit when we have found the start
    // of the nal we want so that we can find the end of the nal we want.

    while (i < bytes.length && (nalsFound < nalLimit || nalStart)) {
      var nalOffset = void 0;

      // 4-byte (00 00 00 01) or 3-byte (00 00 01) start code
      if (bytesMatch(bytes.subarray(i), NAL_TYPE_ONE)) {
        nalOffset = 4;
      } else if (bytesMatch(bytes.subarray(i), NAL_TYPE_TWO)) {
        nalOffset = 3;
      } // we are unsynced,
      // find the next nal unit


      if (!nalOffset) {
        i++;
        continue;
      }

      nalsFound++;

      // a start code after the matched NAL marks its end
      if (nalStart) {
        return discardEmulationPreventionBytes(bytes.subarray(nalStart, i));
      }

      var nalType = void 0;

      // nal_unit_type: low 5 bits for h264, bits 1-6 of the first byte for h265
      if (dataType === 'h264') {
        nalType = bytes[i + nalOffset] & 0x1f;
      } else if (dataType === 'h265') {
        nalType = bytes[i + nalOffset] >> 1 & 0x3f;
      }

      if (types.indexOf(nalType) !== -1) {
        nalStart = i + nalOffset;
      } // nal header is 1 length for h264, and 2 for h265


      i += nalOffset + (dataType === 'h264' ? 1 : 2);
    }

    return bytes.subarray(0, 0);
  };
  // Convenience wrappers binding the codec-specific dataType.
  var findH264Nal = function findH264Nal(bytes, type, nalLimit) {
    return findNal(bytes, 'h264', type, nalLimit);
  };
  var findH265Nal = function findH265Nal(bytes, type, nalLimit) {
    return findNal(bytes, 'h265', type, nalLimit);
  };
8674
  // Byte signatures used by the container-sniffing functions below.
  var CONSTANTS = {
    // "webm" string literal in hex
    'webm': toUint8([0x77, 0x65, 0x62, 0x6d]),
    // "matroska" string literal in hex
    'matroska': toUint8([0x6d, 0x61, 0x74, 0x72, 0x6f, 0x73, 0x6b, 0x61]),
    // "fLaC" string literal in hex
    'flac': toUint8([0x66, 0x4c, 0x61, 0x43]),
    // "OggS" string literal in hex
    'ogg': toUint8([0x4f, 0x67, 0x67, 0x53]),
    // ac-3 sync byte, also works for ec-3 as that is simply a codec
    // of ac-3
    'ac3': toUint8([0x0b, 0x77]),
    // "RIFF" string literal in hex used for wav and avi
    'riff': toUint8([0x52, 0x49, 0x46, 0x46]),
    // "AVI" string literal in hex
    'avi': toUint8([0x41, 0x56, 0x49]),
    // "WAVE" string literal in hex
    'wav': toUint8([0x57, 0x41, 0x56, 0x45]),
    // "ftyp3g" string literal in hex
    '3gp': toUint8([0x66, 0x74, 0x79, 0x70, 0x33, 0x67]),
    // "ftyp" string literal in hex
    'mp4': toUint8([0x66, 0x74, 0x79, 0x70]),
    // "styp" string literal in hex
    'fmp4': toUint8([0x73, 0x74, 0x79, 0x70]),
    // "ftypqt" string literal in hex
    'mov': toUint8([0x66, 0x74, 0x79, 0x70, 0x71, 0x74]),
    // moov string literal in hex
    'moov': toUint8([0x6D, 0x6F, 0x6F, 0x76]),
    // moof string literal in hex
    'moof': toUint8([0x6D, 0x6F, 0x6F, 0x66])
  };
8706 var _isLikely = {
8707 aac: function aac(bytes) {
8708 var offset = getId3Offset(bytes);
8709 return bytesMatch(bytes, [0xFF, 0x10], {
8710 offset: offset,
8711 mask: [0xFF, 0x16]
8712 });
8713 },
8714 mp3: function mp3(bytes) {
8715 var offset = getId3Offset(bytes);
8716 return bytesMatch(bytes, [0xFF, 0x02], {
8717 offset: offset,
8718 mask: [0xFF, 0x06]
8719 });
8720 },
8721 webm: function webm(bytes) {
8722 var docType = findEbml(bytes, [EBML_TAGS.EBML, EBML_TAGS.DocType])[0]; // check if DocType EBML tag is webm
8723
8724 return bytesMatch(docType, CONSTANTS.webm);
8725 },
8726 mkv: function mkv(bytes) {
8727 var docType = findEbml(bytes, [EBML_TAGS.EBML, EBML_TAGS.DocType])[0]; // check if DocType EBML tag is matroska
8728
8729 return bytesMatch(docType, CONSTANTS.matroska);
8730 },
8731 mp4: function mp4(bytes) {
8732 // if this file is another base media file format, it is not mp4
8733 if (_isLikely['3gp'](bytes) || _isLikely.mov(bytes)) {
8734 return false;
8735 } // if this file starts with a ftyp or styp box its mp4
8736
8737
8738 if (bytesMatch(bytes, CONSTANTS.mp4, {
8739 offset: 4
8740 }) || bytesMatch(bytes, CONSTANTS.fmp4, {
8741 offset: 4
8742 })) {
8743 return true;
8744 } // if this file starts with a moof/moov box its mp4
8745
8746
8747 if (bytesMatch(bytes, CONSTANTS.moof, {
8748 offset: 4
8749 }) || bytesMatch(bytes, CONSTANTS.moov, {
8750 offset: 4
8751 })) {
8752 return true;
8753 }
8754 },
8755 mov: function mov(bytes) {
8756 return bytesMatch(bytes, CONSTANTS.mov, {
8757 offset: 4
8758 });
8759 },
8760 '3gp': function gp(bytes) {
8761 return bytesMatch(bytes, CONSTANTS['3gp'], {
8762 offset: 4
8763 });
8764 },
8765 ac3: function ac3(bytes) {
8766 var offset = getId3Offset(bytes);
8767 return bytesMatch(bytes, CONSTANTS.ac3, {
8768 offset: offset
8769 });
8770 },
8771 ts: function ts(bytes) {
8772 if (bytes.length < 189 && bytes.length >= 1) {
8773 return bytes[0] === 0x47;
8774 }
8775
8776 var i = 0; // check the first 376 bytes for two matching sync bytes
8777
8778 while (i + 188 < bytes.length && i < 188) {
8779 if (bytes[i] === 0x47 && bytes[i + 188] === 0x47) {
8780 return true;
8781 }
8782
8783 i += 1;
8784 }
8785
8786 return false;
8787 },
8788 flac: function flac(bytes) {
8789 var offset = getId3Offset(bytes);
8790 return bytesMatch(bytes, CONSTANTS.flac, {
8791 offset: offset
8792 });
8793 },
  ogg: function ogg(bytes) {
    // ogg files start with the signature in CONSTANTS.ogg at byte 0
    return bytesMatch(bytes, CONSTANTS.ogg);
  },
8797 avi: function avi(bytes) {
8798 return bytesMatch(bytes, CONSTANTS.riff) && bytesMatch(bytes, CONSTANTS.avi, {
8799 offset: 8
8800 });
8801 },
8802 wav: function wav(bytes) {
8803 return bytesMatch(bytes, CONSTANTS.riff) && bytesMatch(bytes, CONSTANTS.wav, {
8804 offset: 8
8805 });
8806 },
8807 'h264': function h264(bytes) {
8808 // find seq_parameter_set_rbsp
8809 return findH264Nal(bytes, 7, 3).length;
8810 },
8811 'h265': function h265(bytes) {
8812 // find video_parameter_set_rbsp or seq_parameter_set_rbsp
8813 return findH265Nal(bytes, [32, 33], 3).length;
8814 }
8815 }; // get all the isLikely functions
8816 // but make sure 'ts' is above h264 and h265
8817 // but below everything else as it is the least specific
8818
// every detector name, reordered so that the least-specific checks — ts,
// then the raw h264/h265 bitstream probes — always run last
var isLikelyTypes = Object.keys(_isLikely).filter(function (t) {
  return ['ts', 'h264', 'h265'].indexOf(t) === -1;
}).concat(['ts', 'h264', 'h265']); // make sure we are dealing with uint8 data.
8824
// wrap every detector so callers may pass any byte-ish input;
// toUint8 normalizes the value before the real check runs
isLikelyTypes.forEach(function (type) {
  var check = _isLikely[type];

  _isLikely[type] = function (bytes) {
    return check(toUint8(bytes));
  };
}); // export after wrapping
8832
// public alias for the fully wrapped detector map
var isLikely = _isLikely; // A useful list of file signatures can be found here
// https://en.wikipedia.org/wiki/List_of_file_signatures
// Runs each detector in isLikelyTypes order and returns the name of the
// first container that matches, or '' when none do.
var detectContainerForBytes = function detectContainerForBytes(bytes) {
  bytes = toUint8(bytes);

  for (var i = 0; i < isLikelyTypes.length; i++) {
    var candidate = isLikelyTypes[i];

    if (isLikely[candidate](bytes)) {
      return candidate;
    }
  }

  return '';
}; // fmp4 is not a container
8849
// A byte stream containing at least one moof box is treated as an
// fmp4 media segment.
var isLikelyFmp4MediaSegment = function isLikelyFmp4MediaSegment(bytes) {
  var moofBoxes = findBox(bytes, ['moof']);
  return moofBoxes.length > 0;
};
8853
8854 // which will only happen if the request is complete.
8855
// Invokes `cb` only when `request` has completed (readyState 4) and returns
// its result; otherwise does nothing. The redundant trailing `return;` in
// the previous version was dead code and has been removed.
var callbackOnCompleted = function callbackOnCompleted(request, cb) {
  if (request.readyState === 4) {
    return cb();
  }
};
8863
// Progressively downloads `uri`, inspecting bytes as they arrive, and aborts
// the request as soon as enough data has been seen to identify the container
// format. Calls cb(err, request, detectedType, bytesSoFar) exactly once.
var containerRequest = function containerRequest(uri, xhr, cb) {
  var bytes = [];
  var id3Offset;
  var finished = false;

  // abort the in-flight request and report back exactly once
  var endRequestAndCallback = function endRequestAndCallback(err, req, type, _bytes) {
    req.abort();
    finished = true;
    return cb(err, req, type, _bytes);
  };

  var progressListener = function progressListener(error, request) {
    if (finished) {
      return;
    }

    if (error) {
      return endRequestAndCallback(error, request, '', bytes);
    } // grab the new part of content that was just downloaded


    var newPart = request.responseText.substring(bytes && bytes.byteLength || 0, request.responseText.length); // add that onto bytes

    bytes = concatTypedArrays(bytes, stringToBytes(newPart, true));
    id3Offset = id3Offset || getId3Offset(bytes); // we need at least 10 bytes to determine a type
    // or we need at least two bytes after an id3Offset

    if (bytes.length < 10 || id3Offset && bytes.length < id3Offset + 2) {
      return callbackOnCompleted(request, function () {
        return endRequestAndCallback(error, request, '', bytes);
      });
    }

    var type = detectContainerForBytes(bytes); // if this looks like a ts segment but we don't have enough data
    // to see the second sync byte, wait until we have enough data
    // before declaring it ts

    if (type === 'ts' && bytes.length < 188) {
      return callbackOnCompleted(request, function () {
        return endRequestAndCallback(error, request, '', bytes);
      });
    } // this may be an unsynced ts segment
    // wait for 376 bytes before detecting no container


    if (!type && bytes.length < 376) {
      return callbackOnCompleted(request, function () {
        return endRequestAndCallback(error, request, '', bytes);
      });
    }

    return endRequestAndCallback(null, request, type, bytes);
  };

  var options = {
    uri: uri,
    beforeSend: function beforeSend(request) {
      // this forces the browser to pass the bytes to us unprocessed
      request.overrideMimeType('text/plain; charset=x-user-defined');
      request.addEventListener('progress', function (_ref) {
        _ref.total;
        _ref.loaded;
        return callbackWrapper(request, null, {
          statusCode: request.status
        }, progressListener);
      });
    }
  };
  var request = xhr(options, function (error, response) {
    return callbackWrapper(request, error, response, progressListener);
  });
  return request;
};
8937
// video.js utilities used throughout the DASH playlist loader below
var EventTarget = videojs__default["default"].EventTarget,
    mergeOptions = videojs__default["default"].mergeOptions;
8940
// Returns true when two parsed DASH playlists are effectively identical.
// The generic isPlaylistUnchanged check is insufficient for DASH because
// mediaSequence is not a real DASH concept (it is often just 1), so the
// sidx info and the underlying segments are compared directly as well.
var dashPlaylistUnchanged = function dashPlaylistUnchanged(a, b) {
  if (!isPlaylistUnchanged(a, b)) {
    return false;
  } // a sidx that appeared or disappeared is a change


  if (a.sidx && !b.sidx || !a.sidx && b.sidx) {
    return false;
  } // both have sidx: differing offset/length is a change


  if (a.sidx && b.sidx && (a.sidx.offset !== b.sidx.offset || a.sidx.length !== b.sidx.length)) {
    return false;
  } // segments appeared or disappeared: a change


  if (a.segments && !b.segments || !a.segments && b.segments) {
    return false;
  } // neither side has segments: nothing left that could differ


  if (!a.segments && !b.segments) {
    return true;
  } // compare each segment pair


  for (var i = 0; i < a.segments.length; i++) {
    var segA = a.segments[i];
    var segB = b.segments[i]; // differing uris are a change

    if (segA.uri !== segB.uri) {
      return false;
    } // a byterange present on only one side is a change


    if (segA.byterange && !segB.byterange || !segA.byterange && segB.byterange) {
      return false;
    } // neither has a byterange: nothing more to compare for this pair


    if (!segA.byterange && !segB.byterange) {
      continue;
    } // both have byteranges: differing offset/length is a change


    if (segA.byterange.offset !== segB.byterange.offset || segA.byterange.length !== segB.byterange.length) {
      return false;
    }
  } // everything matched; same playlist


  return true;
};
/**
 * Parses the master XML string and updates playlist URI references.
 *
 * @param {Object} config
 *        Object of arguments
 * @param {string} config.masterXml
 *        The mpd XML
 * @param {string} config.srcUrl
 *        The mpd URL
 * @param {Date} config.clientOffset
 *        A time difference between server and client
 * @param {Object} config.sidxMapping
 *        SIDX mappings for moof/mdat URIs and byte ranges
 * @param {Object} config.previousManifest
 *        The previously parsed manifest, carried through to the parser
 * @return {Object}
 *         The parsed mpd manifest object
 */


var parseMasterXml = function parseMasterXml(_ref) {
  var manifest = parse(_ref.masterXml, {
    manifestUri: _ref.srcUrl,
    clientOffset: _ref.clientOffset,
    sidxMapping: _ref.sidxMapping,
    previousManifest: _ref.previousManifest
  });

  addPropertiesToMaster(manifest, _ref.srcUrl);
  return manifest;
};
/**
 * Returns a new master manifest that is the result of merging an updated master manifest
 * into the original version.
 *
 * @param {Object} oldMaster
 *        The old parsed mpd object
 * @param {Object} newMaster
 *        The updated parsed mpd object
 * @param {Object} sidxMapping
 *        SIDX info keyed by generateSidxKey, used to expand playlists carrying sidx
 * @return {Object}
 *         A new object representing the original master manifest with the updated media
 *         playlists merged in, or null when nothing changed
 */

var updateMaster = function updateMaster(oldMaster, newMaster, sidxMapping) {
  var noChanges = true;
  var update = mergeOptions(oldMaster, {
    // These are top level properties that can be updated
    duration: newMaster.duration,
    minimumUpdatePeriod: newMaster.minimumUpdatePeriod,
    timelineStarts: newMaster.timelineStarts
  }); // First update the playlists in playlist list

  for (var i = 0; i < newMaster.playlists.length; i++) {
    var playlist = newMaster.playlists[i];

    if (playlist.sidx) {
      var sidxKey = generateSidxKey(playlist.sidx); // add sidx segments to the playlist if we have all the sidx info already

      if (sidxMapping && sidxMapping[sidxKey] && sidxMapping[sidxKey].sidx) {
        addSidxSegmentsToPlaylist$1(playlist, sidxMapping[sidxKey].sidx, playlist.sidx.resolvedUri);
      }
    }

    var playlistUpdate = updateMaster$1(update, playlist, dashPlaylistUnchanged);

    if (playlistUpdate) {
      update = playlistUpdate;
      noChanges = false;
    }
  } // Then update media group playlists


  forEachMediaGroup$1(newMaster, function (properties, type, group, label) {
    if (properties.playlists && properties.playlists.length) {
      var id = properties.playlists[0].id;

      var _playlistUpdate = updateMaster$1(update, properties.playlists[0], dashPlaylistUnchanged);

      if (_playlistUpdate) {
        update = _playlistUpdate; // update the playlist reference within media groups

        update.mediaGroups[type][group][label].playlists[0] = update.playlists[id];
        noChanges = false;
      }
    }
  });

  if (newMaster.minimumUpdatePeriod !== oldMaster.minimumUpdatePeriod) {
    noChanges = false;
  }

  if (noChanges) {
    return null;
  }

  return update;
}; // SIDX should be equivalent if the URI and byteranges of the SIDX match.
// If the SIDXs have maps, the two maps should match,
// both `a` and `b` missing SIDXs is considered matching.
// If `a` or `b` but not both have a map, they aren't matching.
9102
// Two sidx entries are equivalent when their maps agree (both absent, or
// both present with identical byteranges) and their uri and byterange match.
var equivalentSidx = function equivalentSidx(a, b) {
  var mapsMatch;

  if (!a.map && !b.map) {
    mapsMatch = true;
  } else {
    mapsMatch = Boolean(a.map && b.map && a.map.byterange.offset === b.map.byterange.offset && a.map.byterange.length === b.map.byterange.length);
  }

  if (!mapsMatch) {
    return false;
  }

  return a.uri === b.uri && a.byterange.offset === b.byterange.offset && a.byterange.length === b.byterange.length;
}; // exported for testing
9108
9109
// Builds the subset of oldSidxMapping whose entries still match the sidx
// info on the given playlists. Note: when a playlist's sidx key is missing
// from the old mapping entirely, iteration stops (matching the original
// behavior of bailing out early).
var compareSidxEntry = function compareSidxEntry(playlists, oldSidxMapping) {
  var unchanged = {};
  var ids = Object.keys(playlists);

  for (var i = 0; i < ids.length; i++) {
    var sidxInfo = playlists[ids[i]].sidx;

    if (!sidxInfo) {
      continue;
    }

    var key = generateSidxKey(sidxInfo);

    if (!oldSidxMapping[key]) {
      break;
    }

    var savedSidxInfo = oldSidxMapping[key].sidxInfo;

    if (equivalentSidx(savedSidxInfo, sidxInfo)) {
      unchanged[key] = oldSidxMapping[key];
    }
  }

  return unchanged;
};
/**
 * A function that filters out changed items as they need to be requested separately.
 *
 * The method is exported for testing
 *
 * @param {Object} master the parsed mpd XML returned via mpd-parser
 * @param {Object} oldSidxMapping the SIDX to compare against
 * @return {Object} the entries of oldSidxMapping that are still valid
 */

var filterChangedSidxMappings = function filterChangedSidxMappings(master, oldSidxMapping) {
  // start from the main playlist list, then fold in every media group
  var unchanged = compareSidxEntry(master.playlists, oldSidxMapping);
  forEachMediaGroup$1(master, function (properties, mediaType, groupKey, labelKey) {
    if (properties.playlists && properties.playlists.length) {
      unchanged = mergeOptions(unchanged, compareSidxEntry(properties.playlists, oldSidxMapping));
    }
  });
  return unchanged;
};
9154
// Loads and keeps a DASH mpd (and its internal media playlists) up to date,
// emitting the same event surface as the HLS PlaylistLoader.
var DashPlaylistLoader = /*#__PURE__*/function (_EventTarget) {
  inheritsLoose(DashPlaylistLoader, _EventTarget);

  // DashPlaylistLoader must accept either a src url or a playlist because subsequent
  // playlist loader setups from media groups will expect to be able to pass a playlist
  // (since there aren't external URLs to media playlists with DASH)
  function DashPlaylistLoader(srcUrlOrPlaylist, vhs, options, masterPlaylistLoader) {
    var _this;

    if (options === void 0) {
      options = {};
    }

    _this = _EventTarget.call(this) || this;
    // when no parent loader is given this instance is the master loader and
    // refers to itself
    _this.masterPlaylistLoader_ = masterPlaylistLoader || assertThisInitialized(_this);

    if (!masterPlaylistLoader) {
      _this.isMaster_ = true;
    }

    var _options = options,
        _options$withCredenti = _options.withCredentials,
        withCredentials = _options$withCredenti === void 0 ? false : _options$withCredenti,
        _options$handleManife = _options.handleManifestRedirects,
        handleManifestRedirects = _options$handleManife === void 0 ? false : _options$handleManife;
    _this.vhs_ = vhs;
    _this.withCredentials = withCredentials;
    _this.handleManifestRedirects = handleManifestRedirects;

    if (!srcUrlOrPlaylist) {
      throw new Error('A non-empty playlist URL or object is required');
    } // event naming?


    _this.on('minimumUpdatePeriod', function () {
      _this.refreshXml_();
    }); // live playlist staleness timeout


    _this.on('mediaupdatetimeout', function () {
      _this.refreshMedia_(_this.media().id);
    });

    _this.state = 'HAVE_NOTHING';
    _this.loadedPlaylists_ = {};
    _this.logger_ = logger('DashPlaylistLoader'); // initialize the loader state
    // The masterPlaylistLoader will be created with a string

    if (_this.isMaster_) {
      _this.masterPlaylistLoader_.srcUrl = srcUrlOrPlaylist; // TODO: reset sidxMapping between period changes
      // once multi-period is refactored

      _this.masterPlaylistLoader_.sidxMapping_ = {};
    } else {
      // child loaders are handed a playlist object directly
      _this.childPlaylist_ = srcUrlOrPlaylist;
    }

    return _this;
  }

  var _proto = DashPlaylistLoader.prototype;
9216
  // Clears the pending request and, when `err` is set, records a normalized
  // error object, optionally restores `startingState`, and fires 'error'.
  // Returns true when the loader was disposed or an error was handled;
  // returns undefined on success.
  _proto.requestErrored_ = function requestErrored_(err, request, startingState) {
    // disposed
    if (!this.request) {
      return true;
    } // pending request is cleared


    this.request = null;

    if (err) {
      // use the provided error object or create one
      // based on the request/response
      this.error = typeof err === 'object' && !(err instanceof Error) ? err : {
        status: request.status,
        message: 'DASH request error at URL: ' + request.uri,
        response: request.response,
        // MEDIA_ERR_NETWORK
        code: 2
      };

      if (startingState) {
        this.state = startingState;
      }

      this.trigger('error');
      return true;
    }
  }
  /**
   * Verify that the container of the sidx segment can be parsed
   * and if it can, get and parse that segment.
   */
  ;
9250
  // Ensures `playlist` has its sidx-derived segments: no-ops (async) when the
  // playlist has no sidx or is already mapped, otherwise downloads the sidx
  // box, parses it, caches it on the master loader, and calls cb(sidxChanged).
  _proto.addSidxSegments_ = function addSidxSegments_(playlist, startingState, cb) {
    var _this2 = this;

    var sidxKey = playlist.sidx && generateSidxKey(playlist.sidx); // playlist lacks sidx or sidx segments were added to this playlist already.

    if (!playlist.sidx || !sidxKey || this.masterPlaylistLoader_.sidxMapping_[sidxKey]) {
      // keep this function async
      this.mediaRequest_ = window.setTimeout(function () {
        return cb(false);
      }, 0);
      return;
    } // resolve the segment URL relative to the playlist


    var uri = resolveManifestRedirect(this.handleManifestRedirects, playlist.sidx.resolvedUri);

    // shared completion path for both the container probe and the byterange xhr
    var fin = function fin(err, request) {
      if (_this2.requestErrored_(err, request, startingState)) {
        return;
      }

      var sidxMapping = _this2.masterPlaylistLoader_.sidxMapping_;
      var sidx;

      try {
        // skip the 8-byte box header before parsing the sidx payload
        sidx = parseSidx_1(toUint8(request.response).subarray(8));
      } catch (e) {
        // sidx parsing failed.
        _this2.requestErrored_(e, request, startingState);

        return;
      }

      sidxMapping[sidxKey] = {
        sidxInfo: playlist.sidx,
        sidx: sidx
      };
      addSidxSegmentsToPlaylist$1(playlist, sidx, playlist.sidx.resolvedUri);
      return cb(true);
    };

    this.request = containerRequest(uri, this.vhs_.xhr, function (err, request, container, bytes) {
      if (err) {
        return fin(err, request);
      }

      if (!container || container !== 'mp4') {
        return fin({
          status: request.status,
          message: "Unsupported " + (container || 'unknown') + " container type for sidx segment at URL: " + uri,
          // response is just bytes in this case
          // but we really don't want to return that.
          response: '',
          playlist: playlist,
          internal: true,
          blacklistDuration: Infinity,
          // MEDIA_ERR_NETWORK
          code: 2
        }, request);
      } // if we already downloaded the sidx bytes in the container request, use them


      var _playlist$sidx$bytera = playlist.sidx.byterange,
          offset = _playlist$sidx$bytera.offset,
          length = _playlist$sidx$bytera.length;

      if (bytes.length >= length + offset) {
        return fin(err, {
          response: bytes.subarray(offset, offset + length),
          status: request.status,
          uri: request.uri
        });
      } // otherwise request sidx bytes


      _this2.request = _this2.vhs_.xhr({
        uri: uri,
        responseType: 'arraybuffer',
        headers: segmentXhrHeaders({
          byterange: playlist.sidx.byterange
        })
      }, fin);
    });
  };
9335
  // Tears the loader down: fires 'dispose', aborts any in-flight request,
  // clears all timers and cached state, and removes every event listener.
  _proto.dispose = function dispose() {
    this.trigger('dispose');
    this.stopRequest();
    this.loadedPlaylists_ = {};
    window.clearTimeout(this.minimumUpdatePeriodTimeout_);
    window.clearTimeout(this.mediaRequest_);
    window.clearTimeout(this.mediaUpdateTimeout);
    this.mediaUpdateTimeout = null;
    this.mediaRequest_ = null;
    this.minimumUpdatePeriodTimeout_ = null;

    if (this.masterPlaylistLoader_.createMupOnMedia_) {
      this.off('loadedmetadata', this.masterPlaylistLoader_.createMupOnMedia_);
      this.masterPlaylistLoader_.createMupOnMedia_ = null;
    }

    this.off();
  };
9354
  // Truthy when either an xhr or the deferred (setTimeout-based) media
  // request is outstanding. Note the raw value, not a boolean, is returned.
  _proto.hasPendingRequest = function hasPendingRequest() {
    return this.request || this.mediaRequest_;
  };
9358
9359 _proto.stopRequest = function stopRequest() {
9360 if (this.request) {
9361 var oldRequest = this.request;
9362 this.request = null;
9363 oldRequest.onreadystatechange = null;
9364 oldRequest.abort();
9365 }
9366 };
9367
  // Getter/setter for the active media playlist. With no argument, returns
  // the current playlist. With a playlist object or id string, switches to
  // it, loading sidx segments first when necessary.
  _proto.media = function media(playlist) {
    var _this3 = this;

    // getter
    if (!playlist) {
      return this.media_;
    } // setter


    if (this.state === 'HAVE_NOTHING') {
      throw new Error('Cannot switch media playlist from ' + this.state);
    }

    var startingState = this.state; // find the playlist object if the target playlist has been specified by URI

    if (typeof playlist === 'string') {
      if (!this.masterPlaylistLoader_.master.playlists[playlist]) {
        throw new Error('Unknown playlist URI: ' + playlist);
      }

      playlist = this.masterPlaylistLoader_.master.playlists[playlist];
    }

    var mediaChange = !this.media_ || playlist.id !== this.media_.id; // switch to previously loaded playlists immediately

    if (mediaChange && this.loadedPlaylists_[playlist.id] && this.loadedPlaylists_[playlist.id].endList) {
      this.state = 'HAVE_METADATA';
      this.media_ = playlist; // trigger media change if the active media has been updated

      if (mediaChange) {
        this.trigger('mediachanging');
        this.trigger('mediachange');
      }

      return;
    } // switching to the active playlist is a no-op


    if (!mediaChange) {
      return;
    } // switching from an already loaded playlist


    if (this.media_) {
      this.trigger('mediachanging');
    }

    this.addSidxSegments_(playlist, startingState, function (sidxChanged) {
      // everything is ready just continue to haveMetadata
      _this3.haveMetadata({
        startingState: startingState,
        playlist: playlist
      });
    });
  };
9423
9424 _proto.haveMetadata = function haveMetadata(_ref2) {
9425 var startingState = _ref2.startingState,
9426 playlist = _ref2.playlist;
9427 this.state = 'HAVE_METADATA';
9428 this.loadedPlaylists_[playlist.id] = playlist;
9429 this.mediaRequest_ = null; // This will trigger loadedplaylist
9430
9431 this.refreshMedia_(playlist.id); // fire loadedmetadata the first time a media playlist is loaded
9432 // to resolve setup of media groups
9433
9434 if (startingState === 'HAVE_MASTER') {
9435 this.trigger('loadedmetadata');
9436 } else {
9437 // trigger media change if the active media has been updated
9438 this.trigger('mediachange');
9439 }
9440 };
9441
  // Halts all loader activity: removes any pending MUP-on-media hook, aborts
  // requests, clears refresh timers, and rewinds to an unstarted state when
  // nothing was ever loaded.
  _proto.pause = function pause() {
    if (this.masterPlaylistLoader_.createMupOnMedia_) {
      this.off('loadedmetadata', this.masterPlaylistLoader_.createMupOnMedia_);
      this.masterPlaylistLoader_.createMupOnMedia_ = null;
    }

    this.stopRequest();
    window.clearTimeout(this.mediaUpdateTimeout);
    this.mediaUpdateTimeout = null;

    if (this.isMaster_) {
      window.clearTimeout(this.masterPlaylistLoader_.minimumUpdatePeriodTimeout_);
      this.masterPlaylistLoader_.minimumUpdatePeriodTimeout_ = null;
    }

    if (this.state === 'HAVE_NOTHING') {
      // If we pause the loader before any data has been retrieved, its as if we never
      // started, so reset to an unstarted state.
      this.started = false;
    }
  };
9463
  // Kicks (or re-kicks) loading. For a final rendition, simply retries after
  // half a target duration (or 5s). Otherwise starts the loader if needed,
  // refreshes a live playlist, or re-announces a complete one.
  _proto.load = function load(isFinalRendition) {
    var _this4 = this;

    window.clearTimeout(this.mediaUpdateTimeout);
    this.mediaUpdateTimeout = null;
    var media = this.media();

    if (isFinalRendition) {
      var delay = media ? media.targetDuration / 2 * 1000 : 5 * 1000;
      this.mediaUpdateTimeout = window.setTimeout(function () {
        return _this4.load();
      }, delay);
      return;
    } // because the playlists are internal to the manifest, load should either load the
    // main manifest, or do nothing but trigger an event


    if (!this.started) {
      this.start();
      return;
    }

    if (media && !media.endList) {
      // Check to see if this is the master loader and the MUP was cleared (this happens
      // when the loader was paused). `media` should be set at this point since one is always
      // set during `start()`.
      if (this.isMaster_ && !this.minimumUpdatePeriodTimeout_) {
        // Trigger minimumUpdatePeriod to refresh the master manifest
        this.trigger('minimumUpdatePeriod'); // Since there was no prior minimumUpdatePeriodTimeout it should be recreated

        this.updateMinimumUpdatePeriodTimeout_();
      }

      this.trigger('mediaupdatetimeout');
    } else {
      this.trigger('loadedplaylist');
    }
  };
9502
  // Begins loading: a child loader jumps straight (asynchronously) to
  // haveMaster_; the master loader requests the mpd and then selects an
  // initial media playlist if none is pending.
  _proto.start = function start() {
    var _this5 = this;

    this.started = true; // We don't need to request the master manifest again
    // Call this asynchronously to match the xhr request behavior below

    if (!this.isMaster_) {
      this.mediaRequest_ = window.setTimeout(function () {
        return _this5.haveMaster_();
      }, 0);
      return;
    }

    this.requestMaster_(function (req, masterChanged) {
      _this5.haveMaster_();

      if (!_this5.hasPendingRequest() && !_this5.media_) {
        _this5.media(_this5.masterPlaylistLoader_.master.playlists[0]);
      }
    });
  };
9524
  // Requests the mpd, records when it was loaded (preferring the response
  // Date header), follows manifest redirects, and calls
  // cb(request, masterChanged). When the xml changed, the master is handled
  // and the clock re-synced before the callback fires.
  _proto.requestMaster_ = function requestMaster_(cb) {
    var _this6 = this;

    this.request = this.vhs_.xhr({
      uri: this.masterPlaylistLoader_.srcUrl,
      withCredentials: this.withCredentials
    }, function (error, req) {
      if (_this6.requestErrored_(error, req)) {
        if (_this6.state === 'HAVE_NOTHING') {
          _this6.started = false;
        }

        return;
      }

      // compare raw xml text to detect a manifest change
      var masterChanged = req.responseText !== _this6.masterPlaylistLoader_.masterXml_;
      _this6.masterPlaylistLoader_.masterXml_ = req.responseText;

      if (req.responseHeaders && req.responseHeaders.date) {
        _this6.masterLoaded_ = Date.parse(req.responseHeaders.date);
      } else {
        _this6.masterLoaded_ = Date.now();
      }

      _this6.masterPlaylistLoader_.srcUrl = resolveManifestRedirect(_this6.handleManifestRedirects, _this6.masterPlaylistLoader_.srcUrl, req);

      if (masterChanged) {
        _this6.handleMaster_();

        _this6.syncClientServerClock_(function () {
          return cb(req, masterChanged);
        });

        return;
      }

      return cb(req, masterChanged);
    });
  }
  /**
   * Parses the master xml for UTCTiming node to sync the client clock to the server
   * clock. If the UTCTiming node requires a HEAD or GET request, that request is made.
   *
   * @param {Function} done
   *        Function to call when clock sync has completed
   */
  ;
9572
  _proto.syncClientServerClock_ = function syncClientServerClock_(done) {
    var _this7 = this;

    var utcTiming = parseUTCTiming(this.masterPlaylistLoader_.masterXml_); // No UTCTiming element found in the mpd. Use Date header from mpd request as the
    // server clock

    if (utcTiming === null) {
      this.masterPlaylistLoader_.clientOffset_ = this.masterLoaded_ - Date.now();
      return done();
    }

    // DIRECT timing embeds the server time in the mpd itself
    if (utcTiming.method === 'DIRECT') {
      this.masterPlaylistLoader_.clientOffset_ = utcTiming.value - Date.now();
      return done();
    }

    // HEAD/GET timing requires an extra request to the timing URL
    this.request = this.vhs_.xhr({
      uri: resolveUrl(this.masterPlaylistLoader_.srcUrl, utcTiming.value),
      method: utcTiming.method,
      withCredentials: this.withCredentials
    }, function (error, req) {
      // disposed
      if (!_this7.request) {
        return;
      }

      if (error) {
        // sync request failed, fall back to using date header from mpd
        // TODO: log warning
        _this7.masterPlaylistLoader_.clientOffset_ = _this7.masterLoaded_ - Date.now();
        return done();
      }

      var serverTime;

      if (utcTiming.method === 'HEAD') {
        if (!req.responseHeaders || !req.responseHeaders.date) {
          // expected date header not present, fall back to using date header from mpd
          // TODO: log warning
          serverTime = _this7.masterLoaded_;
        } else {
          serverTime = Date.parse(req.responseHeaders.date);
        }
      } else {
        serverTime = Date.parse(req.responseText);
      }

      _this7.masterPlaylistLoader_.clientOffset_ = serverTime - Date.now();
      done();
    });
  };
9624
9625 _proto.haveMaster_ = function haveMaster_() {
9626 this.state = 'HAVE_MASTER';
9627
9628 if (this.isMaster_) {
9629 // We have the master playlist at this point, so
9630 // trigger this to allow MasterPlaylistController
9631 // to make an initial playlist selection
9632 this.trigger('loadedplaylist');
9633 } else if (!this.media_) {
9634 // no media playlist was specifically selected so select
9635 // the one the child playlist loader was created with
9636 this.media(this.childPlaylist_);
9637 }
9638 };
9639
  // Re-parses masterXml_ into a manifest, merges it with the previous master
  // (if any), follows the mpd's Location element, and refreshes the MUP timer
  // when minimumUpdatePeriod changed. Returns whether the master was updated.
  _proto.handleMaster_ = function handleMaster_() {
    // clear media request
    this.mediaRequest_ = null;
    var oldMaster = this.masterPlaylistLoader_.master;
    var newMaster = parseMasterXml({
      masterXml: this.masterPlaylistLoader_.masterXml_,
      srcUrl: this.masterPlaylistLoader_.srcUrl,
      clientOffset: this.masterPlaylistLoader_.clientOffset_,
      sidxMapping: this.masterPlaylistLoader_.sidxMapping_,
      previousManifest: oldMaster
    }); // if we have an old master to compare the new master against

    if (oldMaster) {
      // updateMaster returns null when nothing changed
      newMaster = updateMaster(oldMaster, newMaster, this.masterPlaylistLoader_.sidxMapping_);
    } // only update master if we have a new master


    this.masterPlaylistLoader_.master = newMaster ? newMaster : oldMaster;
    var location = this.masterPlaylistLoader_.master.locations && this.masterPlaylistLoader_.master.locations[0];

    if (location && location !== this.masterPlaylistLoader_.srcUrl) {
      this.masterPlaylistLoader_.srcUrl = location;
    }

    if (!oldMaster || newMaster && newMaster.minimumUpdatePeriod !== oldMaster.minimumUpdatePeriod) {
      this.updateMinimumUpdatePeriodTimeout_();
    }

    return Boolean(newMaster);
  };
9670
  // (Re)creates the minimumUpdatePeriod timer on the master loader.
  _proto.updateMinimumUpdatePeriodTimeout_ = function updateMinimumUpdatePeriodTimeout_() {
    var mpl = this.masterPlaylistLoader_; // cancel any pending creation of mup on media
    // a new one will be added if needed.

    if (mpl.createMupOnMedia_) {
      mpl.off('loadedmetadata', mpl.createMupOnMedia_);
      mpl.createMupOnMedia_ = null;
    } // clear any pending timeouts


    if (mpl.minimumUpdatePeriodTimeout_) {
      window.clearTimeout(mpl.minimumUpdatePeriodTimeout_);
      mpl.minimumUpdatePeriodTimeout_ = null;
    }

    var mup = mpl.master && mpl.master.minimumUpdatePeriod; // If the minimumUpdatePeriod has a value of 0, that indicates that the current
    // MPD has no future validity, so a new one will need to be acquired when new
    // media segments are to be made available. Thus, we use the target duration
    // in this case

    if (mup === 0) {
      if (mpl.media()) {
        mup = mpl.media().targetDuration * 1000;
      } else {
        // no media playlist yet; retry once one loads
        mpl.createMupOnMedia_ = mpl.updateMinimumUpdatePeriodTimeout_;
        mpl.one('loadedmetadata', mpl.createMupOnMedia_);
      }
    } // if minimumUpdatePeriod is invalid or <= zero, which
    // can happen when a live video becomes VOD. skip timeout
    // creation.


    if (typeof mup !== 'number' || mup <= 0) {
      if (mup < 0) {
        this.logger_("found invalid minimumUpdatePeriod of " + mup + ", not setting a timeout");
      }

      return;
    }

    this.createMUPTimeout_(mup);
  };
9713
9714 _proto.createMUPTimeout_ = function createMUPTimeout_(mup) {
9715 var mpl = this.masterPlaylistLoader_;
9716 mpl.minimumUpdatePeriodTimeout_ = window.setTimeout(function () {
9717 mpl.minimumUpdatePeriodTimeout_ = null;
9718 mpl.trigger('minimumUpdatePeriod');
9719 mpl.createMUPTimeout_(mup);
9720 }, mup);
9721 }
9722 /**
9723 * Sends request to refresh the master xml and updates the parsed master manifest
9724 */
9725 ;
9726
  // Re-requests the mpd; when it changed, re-resolves the active media
  // reference, prunes stale sidx mappings, re-adds sidx segments, and
  // refreshes the current media playlist.
  _proto.refreshXml_ = function refreshXml_() {
    var _this8 = this;

    this.requestMaster_(function (req, masterChanged) {
      if (!masterChanged) {
        return;
      }

      if (_this8.media_) {
        _this8.media_ = _this8.masterPlaylistLoader_.master.playlists[_this8.media_.id];
      } // This will filter out updated sidx info from the mapping


      _this8.masterPlaylistLoader_.sidxMapping_ = filterChangedSidxMappings(_this8.masterPlaylistLoader_.master, _this8.masterPlaylistLoader_.sidxMapping_);

      _this8.addSidxSegments_(_this8.media(), _this8.state, function (sidxChanged) {
        // TODO: do we need to reload the current playlist?
        _this8.refreshMedia_(_this8.media().id);
      });
    });
  }
  /**
   * Refreshes the media playlist by re-parsing the master xml and updating playlist
   * references. If this is an alternate loader, the updated parsed manifest is retrieved
   * from the master loader.
   */
  ;
9754
  /**
   * Update this loader's media playlist reference after a master refresh and
   * (re)schedule the media update timer for live streams.
   *
   * @param {string} mediaID - id of the playlist to look up in the master
   * @throws {Error} if no media id is given
   * @fires playlistunchanged when the looked-up playlist is the one we already had
   * @fires loadedplaylist always, once references/timers are updated
   */
  _proto.refreshMedia_ = function refreshMedia_(mediaID) {
    var _this9 = this;

    if (!mediaID) {
      throw new Error('refreshMedia_ must take a media id');
    } // for master we have to reparse the master xml
    // to re-create segments based on current timing values
    // which may change media. We only skip updating master
    // if this is the first time this.media_ is being set.
    // as master was just parsed in that case.


    if (this.media_ && this.isMaster_) {
      this.handleMaster_();
    }

    var playlists = this.masterPlaylistLoader_.master.playlists;
    var mediaChanged = !this.media_ || this.media_ !== playlists[mediaID];

    if (mediaChanged) {
      this.media_ = playlists[mediaID];
    } else {
      this.trigger('playlistunchanged');
    }

    // only schedule a refresh timer when one is not already pending
    if (!this.mediaUpdateTimeout) {
      var createMediaUpdateTimeout = function createMediaUpdateTimeout() {
        // endList marks a VOD playlist, which never needs refreshing
        if (_this9.media().endList) {
          return;
        }

        _this9.mediaUpdateTimeout = window.setTimeout(function () {
          _this9.trigger('mediaupdatetimeout');

          // self-rescheduling timer; delay comes from refreshDelay()
          createMediaUpdateTimeout();
        }, refreshDelay(_this9.media(), Boolean(mediaChanged)));
      };

      createMediaUpdateTimeout();
    }

    this.trigger('loadedplaylist');
  };
9798
9799 return DashPlaylistLoader;
9800 }(EventTarget);
9801
  /**
   * Tunable buffering and ABR constants.
   * NOTE(review): the *_LENGTH / *_WATER_LINE values appear to be seconds and
   * INITIAL_BANDWIDTH bits per second (4194304 b/s = 0.5 MB/s) — confirm
   * against the project docs.
   */
  var Config = {
    GOAL_BUFFER_LENGTH: 30,
    MAX_GOAL_BUFFER_LENGTH: 60,
    BACK_BUFFER_LENGTH: 30,
    GOAL_BUFFER_LENGTH_RATE: 1,
    // 0.5 MB/s
    INITIAL_BANDWIDTH: 4194304,
    // A fudge factor to apply to advertised playlist bitrates to account for
    // temporary flucations in client bandwidth
    BANDWIDTH_VARIANCE: 1.2,
    // How much of the buffer must be filled before we consider upswitching
    BUFFER_LOW_WATER_LINE: 0,
    MAX_BUFFER_LOW_WATER_LINE: 30,
    // TODO: Remove this when experimentalBufferBasedABR is removed
    EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE: 16,
    BUFFER_LOW_WATER_LINE_RATE: 1,
    // If the buffer is greater than the high water line, we won't switch down
    BUFFER_HIGH_WATER_LINE: 30
  };
9821
9822 var stringToArrayBuffer = function stringToArrayBuffer(string) {
9823 var view = new Uint8Array(new ArrayBuffer(string.length));
9824
9825 for (var i = 0; i < string.length; i++) {
9826 view[i] = string.charCodeAt(i);
9827 }
9828
9829 return view.buffer;
9830 };
9831
  /* global Blob, BlobBuilder, Worker */
  // unify worker interface

  /**
   * Give a Worker-like object a node-style `on`/`off` interface by aliasing
   * its DOM `addEventListener`/`removeEventListener` methods.
   *
   * @param {Object} workerObj - the worker (or worker-like) object to augment
   * @return {Object} the same object, mutated in place
   */
  var browserWorkerPolyFill = function browserWorkerPolyFill(workerObj) {
    // node only supports on/off
    workerObj.on = workerObj.addEventListener;
    workerObj.off = workerObj.removeEventListener;
    return workerObj;
  };
9840
  /**
   * Create a blob: object URL for a string of JavaScript source so it can be
   * loaded as a Worker script.
   *
   * @param {string} str - JavaScript source text
   * @return {string} an object URL referencing the source
   */
  var createObjectURL = function createObjectURL(str) {
    try {
      return URL.createObjectURL(new Blob([str], {
        type: 'application/javascript'
      }));
    } catch (e) {
      // fall back to the legacy BlobBuilder API for browsers that predate
      // (or throw from) the Blob constructor
      var blob = new BlobBuilder();
      blob.append(str);
      return URL.createObjectURL(blob.getBlob());
    }
  };
9852
  /**
   * Build a factory that spawns Workers from inlined source code. Each call of
   * the returned function creates a fresh object URL and Worker; `terminate`
   * is wrapped so the object URL is revoked when the worker is shut down.
   *
   * @param {string} code - worker source text
   * @return {Function} zero-argument function producing a new polyfilled Worker
   */
  var factory = function factory(code) {
    return function () {
      var objectUrl = createObjectURL(code);
      var worker = browserWorkerPolyFill(new Worker(objectUrl));
      worker.objURL = objectUrl;
      var terminate = worker.terminate;
      // redundant with browserWorkerPolyFill above, but kept (harmless)
      worker.on = worker.addEventListener;
      worker.off = worker.removeEventListener;

      worker.terminate = function () {
        // revoke the URL before terminating so the blob can be collected
        URL.revokeObjectURL(objectUrl);
        return terminate.call(this);
      };

      return worker;
    };
  };
9870 var transform = function transform(code) {
9871 return "var browserWorkerPolyFill = " + browserWorkerPolyFill.toString() + ";\n" + 'browserWorkerPolyFill(self);\n' + code;
9872 };
9873
9874 var getWorkerString = function getWorkerString(fn) {
9875 return fn.toString().replace(/^function.+?{/, '').slice(0, -1);
9876 };
9877
9878 /* rollup-plugin-worker-factory start for worker!/Users/bclifford/Code/vhs-release-test/src/transmuxer-worker.js */
9879 var workerCode$1 = transform(getWorkerString(function () {
9880 /**
9881 * mux.js
9882 *
9883 * Copyright (c) Brightcove
9884 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
9885 *
9886 * A lightweight readable stream implemention that handles event dispatching.
9887 * Objects that inherit from streams should call init in their constructors.
9888 */
9889
9890 var Stream = function Stream() {
9891 this.init = function () {
9892 var listeners = {};
9893 /**
9894 * Add a listener for a specified event type.
9895 * @param type {string} the event name
9896 * @param listener {function} the callback to be invoked when an event of
9897 * the specified type occurs
9898 */
9899
9900 this.on = function (type, listener) {
9901 if (!listeners[type]) {
9902 listeners[type] = [];
9903 }
9904
9905 listeners[type] = listeners[type].concat(listener);
9906 };
9907 /**
9908 * Remove a listener for a specified event type.
9909 * @param type {string} the event name
9910 * @param listener {function} a function previously registered for this
9911 * type of event through `on`
9912 */
9913
9914
9915 this.off = function (type, listener) {
9916 var index;
9917
9918 if (!listeners[type]) {
9919 return false;
9920 }
9921
9922 index = listeners[type].indexOf(listener);
9923 listeners[type] = listeners[type].slice();
9924 listeners[type].splice(index, 1);
9925 return index > -1;
9926 };
9927 /**
9928 * Trigger an event of the specified type on this stream. Any additional
9929 * arguments to this function are passed as parameters to event listeners.
9930 * @param type {string} the event name
9931 */
9932
9933
9934 this.trigger = function (type) {
9935 var callbacks, i, length, args;
9936 callbacks = listeners[type];
9937
9938 if (!callbacks) {
9939 return;
9940 } // Slicing the arguments on every invocation of this method
9941 // can add a significant amount of overhead. Avoid the
9942 // intermediate object creation for the common case of a
9943 // single callback argument
9944
9945
9946 if (arguments.length === 2) {
9947 length = callbacks.length;
9948
9949 for (i = 0; i < length; ++i) {
9950 callbacks[i].call(this, arguments[1]);
9951 }
9952 } else {
9953 args = [];
9954 i = arguments.length;
9955
9956 for (i = 1; i < arguments.length; ++i) {
9957 args.push(arguments[i]);
9958 }
9959
9960 length = callbacks.length;
9961
9962 for (i = 0; i < length; ++i) {
9963 callbacks[i].apply(this, args);
9964 }
9965 }
9966 };
9967 /**
9968 * Destroys the stream and cleans up.
9969 */
9970
9971
9972 this.dispose = function () {
9973 listeners = {};
9974 };
9975 };
9976 };
9977 /**
9978 * Forwards all `data` events on this stream to the destination stream. The
9979 * destination stream should provide a method `push` to receive the data
9980 * events as they arrive.
9981 * @param destination {stream} the stream that will receive all `data` events
9982 * @param autoFlush {boolean} if false, we will not call `flush` on the destination
9983 * when the current stream emits a 'done' event
9984 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
9985 */
9986
9987
    Stream.prototype.pipe = function (destination) {
      // fan each lifecycle event out to the corresponding method on the
      // destination stream so piped streams can be chained
      this.on('data', function (data) {
        destination.push(data);
      });
      this.on('done', function (flushSource) {
        destination.flush(flushSource);
      });
      this.on('partialdone', function (flushSource) {
        destination.partialFlush(flushSource);
      });
      this.on('endedtimeline', function (flushSource) {
        destination.endTimeline(flushSource);
      });
      this.on('reset', function (flushSource) {
        destination.reset(flushSource);
      });
      // returning the destination allows a.pipe(b).pipe(c) chaining
      return destination;
    }; // Default stream functions that are expected to be overridden to perform
10006 // actual work. These are provided by the prototype as a sort of no-op
10007 // implementation so that we don't have to check for their existence in the
10008 // `pipe` function above.
10009
10010
    // default pass-through implementations: each simply re-emits the
    // corresponding event so `pipe` can call them unconditionally
    Stream.prototype.push = function (data) {
      this.trigger('data', data);
    };

    Stream.prototype.flush = function (flushSource) {
      this.trigger('done', flushSource);
    };

    Stream.prototype.partialFlush = function (flushSource) {
      this.trigger('partialdone', flushSource);
    };

    Stream.prototype.endTimeline = function (flushSource) {
      this.trigger('endedtimeline', flushSource);
    };

    Stream.prototype.reset = function (flushSource) {
      this.trigger('reset', flushSource);
    };

    var stream = Stream;
10032 var MAX_UINT32$1 = Math.pow(2, 32);
10033
10034 var getUint64$2 = function getUint64(uint8) {
10035 var dv = new DataView(uint8.buffer, uint8.byteOffset, uint8.byteLength);
10036 var value;
10037
10038 if (dv.getBigUint64) {
10039 value = dv.getBigUint64(0);
10040
10041 if (value < Number.MAX_SAFE_INTEGER) {
10042 return Number(value);
10043 }
10044
10045 return value;
10046 }
10047
10048 return dv.getUint32(0) * MAX_UINT32$1 + dv.getUint32(4);
10049 };
10050
    // grouped numeric helpers, mirroring mux.js's utils/numbers module
    var numbers = {
      getUint64: getUint64$2,
      MAX_UINT32: MAX_UINT32$1
    };
    var MAX_UINT32 = numbers.MAX_UINT32;
    // forward declarations for the mp4 box builders and the byte-constant
    // tables initialized by the IIFE below
    var box, dinf, esds, ftyp, mdat, mfhd, minf, moof, moov, mvex, mvhd, trak, tkhd, mdia, mdhd, hdlr, sdtp, stbl, stsd, traf, trex, trun$1, types, MAJOR_BRAND, MINOR_VERSION, AVC1_BRAND, VIDEO_HDLR, AUDIO_HDLR, HDLR_TYPES, VMHD, SMHD, DREF, STCO, STSC, STSZ, STTS; // pre-calculate constants
10057
    // Pre-compute the 4-byte type codes and the constant payloads shared by
    // the box builders below. Runs once at module load.
    (function () {
      var i;
      types = {
        avc1: [],
        // codingname
        avcC: [],
        btrt: [],
        dinf: [],
        dref: [],
        esds: [],
        ftyp: [],
        hdlr: [],
        mdat: [],
        mdhd: [],
        mdia: [],
        mfhd: [],
        minf: [],
        moof: [],
        moov: [],
        mp4a: [],
        // codingname
        mvex: [],
        mvhd: [],
        pasp: [],
        sdtp: [],
        smhd: [],
        stbl: [],
        stco: [],
        stsc: [],
        stsd: [],
        stsz: [],
        stts: [],
        styp: [],
        tfdt: [],
        tfhd: [],
        traf: [],
        trak: [],
        trun: [],
        trex: [],
        tkhd: [],
        vmhd: []
      }; // In environments where Uint8Array is undefined (e.g., IE8), skip set up so that we
      // don't throw an error

      if (typeof Uint8Array === 'undefined') {
        return;
      }

      // replace each placeholder with the 4 ASCII char codes of its box name
      for (i in types) {
        if (types.hasOwnProperty(i)) {
          types[i] = [i.charCodeAt(0), i.charCodeAt(1), i.charCodeAt(2), i.charCodeAt(3)];
        }
      }

      MAJOR_BRAND = new Uint8Array(['i'.charCodeAt(0), 's'.charCodeAt(0), 'o'.charCodeAt(0), 'm'.charCodeAt(0)]);
      AVC1_BRAND = new Uint8Array(['a'.charCodeAt(0), 'v'.charCodeAt(0), 'c'.charCodeAt(0), '1'.charCodeAt(0)]);
      MINOR_VERSION = new Uint8Array([0, 0, 0, 1]);
      VIDEO_HDLR = new Uint8Array([0x00, // version 0
      0x00, 0x00, 0x00, // flags
      0x00, 0x00, 0x00, 0x00, // pre_defined
      0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'
      0x00, 0x00, 0x00, 0x00, // reserved
      0x00, 0x00, 0x00, 0x00, // reserved
      0x00, 0x00, 0x00, 0x00, // reserved
      0x56, 0x69, 0x64, 0x65, 0x6f, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
      ]);
      AUDIO_HDLR = new Uint8Array([0x00, // version 0
      0x00, 0x00, 0x00, // flags
      0x00, 0x00, 0x00, 0x00, // pre_defined
      0x73, 0x6f, 0x75, 0x6e, // handler_type: 'soun'
      0x00, 0x00, 0x00, 0x00, // reserved
      0x00, 0x00, 0x00, 0x00, // reserved
      0x00, 0x00, 0x00, 0x00, // reserved
      0x53, 0x6f, 0x75, 0x6e, 0x64, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler'
      ]);
      HDLR_TYPES = {
        video: VIDEO_HDLR,
        audio: AUDIO_HDLR
      };
      DREF = new Uint8Array([0x00, // version 0
      0x00, 0x00, 0x00, // flags
      0x00, 0x00, 0x00, 0x01, // entry_count
      0x00, 0x00, 0x00, 0x0c, // entry_size
      0x75, 0x72, 0x6c, 0x20, // 'url' type
      0x00, // version 0
      0x00, 0x00, 0x01 // entry_flags
      ]);
      SMHD = new Uint8Array([0x00, // version
      0x00, 0x00, 0x00, // flags
      0x00, 0x00, // balance, 0 means centered
      0x00, 0x00 // reserved
      ]);
      STCO = new Uint8Array([0x00, // version
      0x00, 0x00, 0x00, // flags
      0x00, 0x00, 0x00, 0x00 // entry_count
      ]);
      // NOTE: STSC and STTS alias the exact same Uint8Array instance as STCO
      // (all three are an empty full box: version/flags + zero entry_count)
      STSC = STCO;
      STSZ = new Uint8Array([0x00, // version
      0x00, 0x00, 0x00, // flags
      0x00, 0x00, 0x00, 0x00, // sample_size
      0x00, 0x00, 0x00, 0x00 // sample_count
      ]);
      STTS = STCO;
      VMHD = new Uint8Array([0x00, // version
      0x00, 0x00, 0x01, // flags
      0x00, 0x00, // graphicsmode
      0x00, 0x00, 0x00, 0x00, 0x00, 0x00 // opcolor
      ]);
    })();
10167
    /**
     * Serialize an MP4 box: 4-byte big-endian size, 4-byte type code, then
     * the concatenated payload arguments.
     *
     * @param {Array} type - 4 char codes from `types`
     * @param {...Uint8Array} payload - byte payloads appended in order
     * @return {Uint8Array} the serialized box
     */
    box = function box(type) {
      var payload = [],
          size = 0,
          i,
          result,
          view;

      // collect the variadic payload arguments (everything after `type`)
      for (i = 1; i < arguments.length; i++) {
        payload.push(arguments[i]);
      }

      i = payload.length; // calculate the total size we need to allocate

      while (i--) {
        size += payload[i].byteLength;
      }

      // 8 bytes of header: uint32 size + 4-byte type
      result = new Uint8Array(size + 8);
      view = new DataView(result.buffer, result.byteOffset, result.byteLength);
      view.setUint32(0, result.byteLength);
      result.set(type, 4); // copy the payload into the result

      for (i = 0, size = 8; i < payload.length; i++) {
        result.set(payload[i], size);
        size += payload[i].byteLength;
      }

      return result;
    };
10197
    // data information box: a single constant 'dref' entry
    dinf = function dinf() {
      return box(types.dinf, box(types.dref, DREF));
    };

    /**
     * Elementary stream descriptor box for AAC audio; the last bytes encode
     * the AudioSpecificConfig from the track's object type, sampling
     * frequency index and channel count.
     */
    esds = function esds(track) {
      return box(types.esds, new Uint8Array([0x00, // version
      0x00, 0x00, 0x00, // flags
      // ES_Descriptor
      0x03, // tag, ES_DescrTag
      0x19, // length
      0x00, 0x00, // ES_ID
      0x00, // streamDependenceFlag, URL_flag, reserved, streamPriority
      // DecoderConfigDescriptor
      0x04, // tag, DecoderConfigDescrTag
      0x11, // length
      0x40, // object type
      0x15, // streamType
      0x00, 0x06, 0x00, // bufferSizeDB
      0x00, 0x00, 0xda, 0xc0, // maxBitrate
      0x00, 0x00, 0xda, 0xc0, // avgBitrate
      // DecoderSpecificInfo
      0x05, // tag, DecoderSpecificInfoTag
      0x02, // length
      // ISO/IEC 14496-3, AudioSpecificConfig
      // for samplingFrequencyIndex see ISO/IEC 13818-7:2006, 8.1.3.2.2, Table 35
      track.audioobjecttype << 3 | track.samplingfrequencyindex >>> 1, track.samplingfrequencyindex << 7 | track.channelcount << 3, 0x06, 0x01, 0x02 // GASpecificConfig
      ]));
    };

    // file type box: isom major brand, avc1 compatible brand
    ftyp = function ftyp() {
      return box(types.ftyp, MAJOR_BRAND, MINOR_VERSION, MAJOR_BRAND, AVC1_BRAND);
    };

    // handler box for 'video' or 'audio' (see HDLR_TYPES)
    hdlr = function hdlr(type) {
      return box(types.hdlr, HDLR_TYPES[type]);
    };

    // media data box wrapping raw sample bytes
    mdat = function mdat(data) {
      return box(types.mdat, data);
    };
10238
    /**
     * Media header box. Defaults to a 90kHz timescale; when the track carries
     * an explicit samplerate (e.g. parsed from an ADTS header) it overwrites
     * the timescale bytes (offsets 12-15).
     */
    mdhd = function mdhd(track) {
      var result = new Uint8Array([0x00, // version 0
      0x00, 0x00, 0x00, // flags
      0x00, 0x00, 0x00, 0x02, // creation_time
      0x00, 0x00, 0x00, 0x03, // modification_time
      0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
      track.duration >>> 24 & 0xFF, track.duration >>> 16 & 0xFF, track.duration >>> 8 & 0xFF, track.duration & 0xFF, // duration
      0x55, 0xc4, // 'und' language (undetermined)
      0x00, 0x00]); // Use the sample rate from the track metadata, when it is
      // defined. The sample rate can be parsed out of an ADTS header, for
      // instance.

      if (track.samplerate) {
        result[12] = track.samplerate >>> 24 & 0xFF;
        result[13] = track.samplerate >>> 16 & 0xFF;
        result[14] = track.samplerate >>> 8 & 0xFF;
        result[15] = track.samplerate & 0xFF;
      }

      return box(types.mdhd, result);
    };
10260
    // media box: header + handler + media information
    mdia = function mdia(track) {
      return box(types.mdia, mdhd(track), hdlr(track.type), minf(track));
    };

    // movie fragment header box carrying the big-endian sequence number
    mfhd = function mfhd(sequenceNumber) {
      return box(types.mfhd, new Uint8Array([0x00, 0x00, 0x00, 0x00, // flags
      (sequenceNumber & 0xFF000000) >> 24, (sequenceNumber & 0xFF0000) >> 16, (sequenceNumber & 0xFF00) >> 8, sequenceNumber & 0xFF // sequence_number
      ]));
    };

    // media information box: vmhd for video / smhd for audio, plus dinf + stbl
    minf = function minf(track) {
      return box(types.minf, track.type === 'video' ? box(types.vmhd, VMHD) : box(types.smhd, SMHD), dinf(), stbl(track));
    };

    /**
     * Movie fragment box: mfhd followed by one traf per track.
     * @param sequenceNumber {number} fragment sequence number
     * @param tracks {array} track definitions for this fragment
     */
    moof = function moof(sequenceNumber, tracks) {
      var trackFragments = [],
          i = tracks.length; // build traf boxes for each track fragment

      while (i--) {
        trackFragments[i] = traf(tracks[i]);
      }

      return box.apply(null, [types.moof, mfhd(sequenceNumber)].concat(trackFragments));
    };
10285 /**
10286 * Returns a movie box.
10287 * @param tracks {array} the tracks associated with this movie
10288 * @see ISO/IEC 14496-12:2012(E), section 8.2.1
10289 */
10290
10291
    moov = function moov(tracks) {
      var i = tracks.length,
          boxes = [];

      // one trak box per track, iterated in reverse to preserve order
      while (i--) {
        boxes[i] = trak(tracks[i]);
      }

      // mvhd duration 0xffffffff marks an indeterminate (fragmented) duration
      return box.apply(null, [types.moov, mvhd(0xffffffff)].concat(boxes).concat(mvex(tracks)));
    };
10302
    // movie extends box: one trex per track, marking the file as fragmented
    mvex = function mvex(tracks) {
      var i = tracks.length,
          boxes = [];

      while (i--) {
        boxes[i] = trex(tracks[i]);
      }

      return box.apply(null, [types.mvex].concat(boxes));
    };

    /**
     * Movie header box with a fixed 90kHz timescale, identity transform and
     * the given 32-bit duration.
     */
    mvhd = function mvhd(duration) {
      var bytes = new Uint8Array([0x00, // version 0
      0x00, 0x00, 0x00, // flags
      0x00, 0x00, 0x00, 0x01, // creation_time
      0x00, 0x00, 0x00, 0x02, // modification_time
      0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
      (duration & 0xFF000000) >> 24, (duration & 0xFF0000) >> 16, (duration & 0xFF00) >> 8, duration & 0xFF, // duration
      0x00, 0x01, 0x00, 0x00, // 1.0 rate
      0x01, 0x00, // 1.0 volume
      0x00, 0x00, // reserved
      0x00, 0x00, 0x00, 0x00, // reserved
      0x00, 0x00, 0x00, 0x00, // reserved
      0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
      0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
      0xff, 0xff, 0xff, 0xff // next_track_ID
      ]);
      return box(types.mvhd, bytes);
    };
10332
    /**
     * Independent-and-disposable-samples box: one byte per sample packing
     * dependsOn / isDependedOn / hasRedundancy flags.
     */
    sdtp = function sdtp(track) {
      var samples = track.samples || [],
          bytes = new Uint8Array(4 + samples.length),
          flags,
          i; // leave the full box header (4 bytes) all zero
      // write the sample table

      for (i = 0; i < samples.length; i++) {
        flags = samples[i].flags;
        bytes[i + 4] = flags.dependsOn << 4 | flags.isDependedOn << 2 | flags.hasRedundancy;
      }

      return box(types.sdtp, bytes);
    };

    // sample table box; child tables are empty constants since samples live
    // in movie fragments, not the moov
    stbl = function stbl(track) {
      return box(types.stbl, stsd(track), box(types.stts, STTS), box(types.stsc, STSC), box(types.stsz, STSZ), box(types.stco, STCO));
    };
10351
    // sample description builders; videoSample/audioSample are private to
    // this IIFE and selected by track.type inside stsd
    (function () {
      var videoSample, audioSample;

      // sample description box with a single entry (avc1 or mp4a)
      stsd = function stsd(track) {
        return box(types.stsd, new Uint8Array([0x00, // version 0
        0x00, 0x00, 0x00, // flags
        0x00, 0x00, 0x00, 0x01]), track.type === 'video' ? videoSample(track) : audioSample(track));
      };

      // avc1 sample entry: visual sample fields + avcC (SPS/PPS) + btrt,
      // and an optional pasp box when a sample aspect ratio is present
      videoSample = function videoSample(track) {
        var sps = track.sps || [],
            pps = track.pps || [],
            sequenceParameterSets = [],
            pictureParameterSets = [],
            i,
            avc1Box; // assemble the SPSs

        for (i = 0; i < sps.length; i++) {
          sequenceParameterSets.push((sps[i].byteLength & 0xFF00) >>> 8);
          sequenceParameterSets.push(sps[i].byteLength & 0xFF); // sequenceParameterSetLength

          sequenceParameterSets = sequenceParameterSets.concat(Array.prototype.slice.call(sps[i])); // SPS
        } // assemble the PPSs


        for (i = 0; i < pps.length; i++) {
          pictureParameterSets.push((pps[i].byteLength & 0xFF00) >>> 8);
          pictureParameterSets.push(pps[i].byteLength & 0xFF);
          pictureParameterSets = pictureParameterSets.concat(Array.prototype.slice.call(pps[i]));
        }

        avc1Box = [types.avc1, new Uint8Array([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
        0x00, 0x01, // data_reference_index
        0x00, 0x00, // pre_defined
        0x00, 0x00, // reserved
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
        (track.width & 0xff00) >> 8, track.width & 0xff, // width
        (track.height & 0xff00) >> 8, track.height & 0xff, // height
        0x00, 0x48, 0x00, 0x00, // horizresolution
        0x00, 0x48, 0x00, 0x00, // vertresolution
        0x00, 0x00, 0x00, 0x00, // reserved
        0x00, 0x01, // frame_count
        0x13, 0x76, 0x69, 0x64, 0x65, 0x6f, 0x6a, 0x73, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x69, 0x62, 0x2d, 0x68, 0x6c, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // compressorname
        0x00, 0x18, // depth = 24
        // NOTE(review): a pre_defined of -1 would serialize as 0xFF, 0xFF;
        // these 0x11 bytes match upstream mux.js, so left untouched — confirm
        0x11, 0x11 // pre_defined = -1
        ]), box(types.avcC, new Uint8Array([0x01, // configurationVersion
        track.profileIdc, // AVCProfileIndication
        track.profileCompatibility, // profile_compatibility
        track.levelIdc, // AVCLevelIndication
        0xff // lengthSizeMinusOne, hard-coded to 4 bytes
        ].concat([sps.length], // numOfSequenceParameterSets
        sequenceParameterSets, // "SPS"
        [pps.length], // numOfPictureParameterSets
        pictureParameterSets // "PPS"
        ))), box(types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80, // bufferSizeDB
        0x00, 0x2d, 0xc6, 0xc0, // maxBitrate
        0x00, 0x2d, 0xc6, 0xc0 // avgBitrate
        ]))];

        if (track.sarRatio) {
          var hSpacing = track.sarRatio[0],
              vSpacing = track.sarRatio[1];
          avc1Box.push(box(types.pasp, new Uint8Array([(hSpacing & 0xFF000000) >> 24, (hSpacing & 0xFF0000) >> 16, (hSpacing & 0xFF00) >> 8, hSpacing & 0xFF, (vSpacing & 0xFF000000) >> 24, (vSpacing & 0xFF0000) >> 16, (vSpacing & 0xFF00) >> 8, vSpacing & 0xFF])));
        }

        return box.apply(null, avc1Box);
      };

      // mp4a sample entry followed by the esds decoder configuration
      audioSample = function audioSample(track) {
        return box(types.mp4a, new Uint8Array([// SampleEntry, ISO/IEC 14496-12
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
        0x00, 0x01, // data_reference_index
        // AudioSampleEntry, ISO/IEC 14496-12
        0x00, 0x00, 0x00, 0x00, // reserved
        0x00, 0x00, 0x00, 0x00, // reserved
        (track.channelcount & 0xff00) >> 8, track.channelcount & 0xff, // channelcount
        (track.samplesize & 0xff00) >> 8, track.samplesize & 0xff, // samplesize
        0x00, 0x00, // pre_defined
        0x00, 0x00, // reserved
        (track.samplerate & 0xff00) >> 8, track.samplerate & 0xff, 0x00, 0x00 // samplerate, 16.16
        // MP4AudioSampleEntry, ISO/IEC 14496-14
        ]), esds(track));
      };
    })();
10436
    /**
     * Track header box: flags 0x7 (enabled, in movie, in preview), identity
     * transform, and the track's id/duration/width/height packed big-endian.
     */
    tkhd = function tkhd(track) {
      var result = new Uint8Array([0x00, // version 0
      0x00, 0x00, 0x07, // flags
      0x00, 0x00, 0x00, 0x00, // creation_time
      0x00, 0x00, 0x00, 0x00, // modification_time
      (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
      0x00, 0x00, 0x00, 0x00, // reserved
      (track.duration & 0xFF000000) >> 24, (track.duration & 0xFF0000) >> 16, (track.duration & 0xFF00) >> 8, track.duration & 0xFF, // duration
      0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
      0x00, 0x00, // layer
      0x00, 0x00, // alternate_group
      0x01, 0x00, // non-audio track volume
      0x00, 0x00, // reserved
      0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
      (track.width & 0xFF00) >> 8, track.width & 0xFF, 0x00, 0x00, // width
      (track.height & 0xFF00) >> 8, track.height & 0xFF, 0x00, 0x00 // height
      ]);
      return box(types.tkhd, result);
    };
10456 /**
10457 * Generate a track fragment (traf) box. A traf box collects metadata
10458 * about tracks in a movie fragment (moof) box.
10459 */
10460
10461
    traf = function traf(track) {
      var trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable, dataOffset, upperWordBaseMediaDecodeTime, lowerWordBaseMediaDecodeTime;
      trackFragmentHeader = box(types.tfhd, new Uint8Array([0x00, // version 0
      0x00, 0x00, 0x3a, // flags
      (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
      0x00, 0x00, 0x00, 0x01, // sample_description_index
      0x00, 0x00, 0x00, 0x00, // default_sample_duration
      0x00, 0x00, 0x00, 0x00, // default_sample_size
      0x00, 0x00, 0x00, 0x00 // default_sample_flags
      ]));
      // split the 64-bit decode time into two 32-bit words for the v1 tfdt
      upperWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime / MAX_UINT32);
      lowerWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime % MAX_UINT32);
      trackFragmentDecodeTime = box(types.tfdt, new Uint8Array([0x01, // version 1
      0x00, 0x00, 0x00, // flags
      // baseMediaDecodeTime
      upperWordBaseMediaDecodeTime >>> 24 & 0xFF, upperWordBaseMediaDecodeTime >>> 16 & 0xFF, upperWordBaseMediaDecodeTime >>> 8 & 0xFF, upperWordBaseMediaDecodeTime & 0xFF, lowerWordBaseMediaDecodeTime >>> 24 & 0xFF, lowerWordBaseMediaDecodeTime >>> 16 & 0xFF, lowerWordBaseMediaDecodeTime >>> 8 & 0xFF, lowerWordBaseMediaDecodeTime & 0xFF])); // the data offset specifies the number of bytes from the start of
      // the containing moof to the first payload byte of the associated
      // mdat

      dataOffset = 32 + // tfhd
      20 + // tfdt
      8 + // traf header
      16 + // mfhd
      8 + // moof header
      8; // mdat header
      // audio tracks require less metadata

      if (track.type === 'audio') {
        trackFragmentRun = trun$1(track, dataOffset);
        return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun);
      } // video tracks should contain an independent and disposable samples
      // box (sdtp)
      // generate one and adjust offsets to match


      sampleDependencyTable = sdtp(track);
      trackFragmentRun = trun$1(track, sampleDependencyTable.length + dataOffset);
      return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable);
    };
10501 /**
10502 * Generate a track box.
10503 * @param track {object} a track definition
10504 * @return {Uint8Array} the track box
10505 */
10506
10507
    trak = function trak(track) {
      // 0xffffffff means "indeterminate duration" for fragmented tracks
      track.duration = track.duration || 0xffffffff;
      return box(types.trak, tkhd(track), mdia(track));
    };

    /**
     * Track extends box declaring per-track fragment defaults; the last
     * default_sample_flags byte is the degradation priority, lowered for
     * non-video tracks.
     */
    trex = function trex(track) {
      var result = new Uint8Array([0x00, // version 0
      0x00, 0x00, 0x00, // flags
      (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
      0x00, 0x00, 0x00, 0x01, // default_sample_description_index
      0x00, 0x00, 0x00, 0x00, // default_sample_duration
      0x00, 0x00, 0x00, 0x00, // default_sample_size
      0x00, 0x01, 0x00, 0x01 // default_sample_flags
      ]); // the last two bytes of default_sample_flags is the sample
      // degradation priority, a hint about the importance of this sample
      // relative to others. Lower the degradation priority for all sample
      // types other than video.

      if (track.type !== 'video') {
        result[result.length - 1] = 0x00;
      }

      return box(types.trex, result);
    };
10532
10533 (function () {
10534 var audioTrun, videoTrun, trunHeader; // This method assumes all samples are uniform. That is, if a
10535 // duration is present for the first sample, it will be present for
10536 // all subsequent samples.
10537 // see ISO/IEC 14496-12:2012, Section 8.8.8.1
10538
10539 trunHeader = function trunHeader(samples, offset) {
10540 var durationPresent = 0,
10541 sizePresent = 0,
10542 flagsPresent = 0,
10543 compositionTimeOffset = 0; // trun flag constants
10544
10545 if (samples.length) {
10546 if (samples[0].duration !== undefined) {
10547 durationPresent = 0x1;
10548 }
10549
10550 if (samples[0].size !== undefined) {
10551 sizePresent = 0x2;
10552 }
10553
10554 if (samples[0].flags !== undefined) {
10555 flagsPresent = 0x4;
10556 }
10557
10558 if (samples[0].compositionTimeOffset !== undefined) {
10559 compositionTimeOffset = 0x8;
10560 }
10561 }
10562
10563 return [0x00, // version 0
10564 0x00, durationPresent | sizePresent | flagsPresent | compositionTimeOffset, 0x01, // flags
10565 (samples.length & 0xFF000000) >>> 24, (samples.length & 0xFF0000) >>> 16, (samples.length & 0xFF00) >>> 8, samples.length & 0xFF, // sample_count
10566 (offset & 0xFF000000) >>> 24, (offset & 0xFF0000) >>> 16, (offset & 0xFF00) >>> 8, offset & 0xFF // data_offset
10567 ];
10568 };
10569
10570 videoTrun = function videoTrun(track, offset) {
10571 var bytesOffest, bytes, header, samples, sample, i;
10572 samples = track.samples || [];
10573 offset += 8 + 12 + 16 * samples.length;
10574 header = trunHeader(samples, offset);
10575 bytes = new Uint8Array(header.length + samples.length * 16);
10576 bytes.set(header);
10577 bytesOffest = header.length;
10578
10579 for (i = 0; i < samples.length; i++) {
10580 sample = samples[i];
10581 bytes[bytesOffest++] = (sample.duration & 0xFF000000) >>> 24;
10582 bytes[bytesOffest++] = (sample.duration & 0xFF0000) >>> 16;
10583 bytes[bytesOffest++] = (sample.duration & 0xFF00) >>> 8;
10584 bytes[bytesOffest++] = sample.duration & 0xFF; // sample_duration
10585
10586 bytes[bytesOffest++] = (sample.size & 0xFF000000) >>> 24;
10587 bytes[bytesOffest++] = (sample.size & 0xFF0000) >>> 16;
10588 bytes[bytesOffest++] = (sample.size & 0xFF00) >>> 8;
10589 bytes[bytesOffest++] = sample.size & 0xFF; // sample_size
10590
10591 bytes[bytesOffest++] = sample.flags.isLeading << 2 | sample.flags.dependsOn;
10592 bytes[bytesOffest++] = sample.flags.isDependedOn << 6 | sample.flags.hasRedundancy << 4 | sample.flags.paddingValue << 1 | sample.flags.isNonSyncSample;
10593 bytes[bytesOffest++] = sample.flags.degradationPriority & 0xF0 << 8;
10594 bytes[bytesOffest++] = sample.flags.degradationPriority & 0x0F; // sample_flags
10595
10596 bytes[bytesOffest++] = (sample.compositionTimeOffset & 0xFF000000) >>> 24;
10597 bytes[bytesOffest++] = (sample.compositionTimeOffset & 0xFF0000) >>> 16;
10598 bytes[bytesOffest++] = (sample.compositionTimeOffset & 0xFF00) >>> 8;
10599 bytes[bytesOffest++] = sample.compositionTimeOffset & 0xFF; // sample_composition_time_offset
10600 }
10601
10602 return box(types.trun, bytes);
10603 };
10604
  // Generate a track fragment run (trun) box for an audio track.
  // Audio sample entries carry only sample_duration and sample_size
  // (4 bytes each); compare videoTrun above, whose entries also carry
  // sample_flags and composition time offsets.
  audioTrun = function audioTrun(track, offset) {
    var bytes, bytesOffest, header, samples, sample, i;
    samples = track.samples || [];
    // account for this box's own header (8) + trun fields (12) + 8 bytes
    // per sample so data_offset points at the media data
    offset += 8 + 12 + 8 * samples.length;
    header = trunHeader(samples, offset);
    bytes = new Uint8Array(header.length + samples.length * 8);
    bytes.set(header);
    bytesOffest = header.length;

    for (i = 0; i < samples.length; i++) {
      sample = samples[i];
      // 32-bit big-endian sample_duration
      bytes[bytesOffest++] = (sample.duration & 0xFF000000) >>> 24;
      bytes[bytesOffest++] = (sample.duration & 0xFF0000) >>> 16;
      bytes[bytesOffest++] = (sample.duration & 0xFF00) >>> 8;
      bytes[bytesOffest++] = sample.duration & 0xFF; // sample_duration

      // 32-bit big-endian sample_size
      bytes[bytesOffest++] = (sample.size & 0xFF000000) >>> 24;
      bytes[bytesOffest++] = (sample.size & 0xFF0000) >>> 16;
      bytes[bytesOffest++] = (sample.size & 0xFF00) >>> 8;
      bytes[bytesOffest++] = sample.size & 0xFF; // sample_size
    }

    return box(types.trun, bytes);
  };
10629
10630 trun$1 = function trun(track, offset) {
10631 if (track.type === 'audio') {
10632 return audioTrun(track, offset);
10633 }
10634
10635 return videoTrun(track, offset);
10636 };
10637 })();
10638
  // Public interface of the MP4 box generator module.
  var mp4Generator = {
    ftyp: ftyp,
    mdat: mdat,
    moof: moof,
    moov: moov,
    // Build a complete MP4 initialization segment: an ftyp box
    // immediately followed by a moov box describing `tracks`.
    initSegment: function initSegment(tracks) {
      var fileType = ftyp(),
          movie = moov(tracks),
          result;
      result = new Uint8Array(fileType.byteLength + movie.byteLength);
      result.set(fileType);
      result.set(movie, fileType.byteLength);
      return result;
    }
  };
10654 /**
10655 * mux.js
10656 *
10657 * Copyright (c) Brightcove
10658 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
10659 */
10660 // Convert an array of nal units into an array of frames with each frame being
10661 // composed of the nal units that make up that frame
10662 // Also keep track of cummulative data about the frame from the nal units such
10663 // as the frame duration, starting pts, etc.
10664
  // Group a flat list of NAL units into frames, splitting on access unit
  // delimiter (AUD) NALs. The returned array carries running totals
  // (byteLength, nalCount, duration) as extra properties, as does each
  // frame (byteLength, pts, dts, duration, keyFrame).
  var groupNalsIntoFrames = function groupNalsIntoFrames(nalUnits) {
    var i,
        currentNal,
        currentFrame = [],
        frames = []; // TODO added for LHLS, make sure this is OK

    frames.byteLength = 0;
    frames.nalCount = 0;
    frames.duration = 0;
    currentFrame.byteLength = 0;

    for (i = 0; i < nalUnits.length; i++) {
      currentNal = nalUnits[i]; // Split on 'aud'-type nal units

      if (currentNal.nalUnitType === 'access_unit_delimiter_rbsp') {
        // Since the very first nal unit is expected to be an AUD
        // only push to the frames array when currentFrame is not empty
        if (currentFrame.length) {
          // frame duration is the DTS delta to the next frame's AUD
          currentFrame.duration = currentNal.dts - currentFrame.dts; // TODO added for LHLS, make sure this is OK

          frames.byteLength += currentFrame.byteLength;
          frames.nalCount += currentFrame.length;
          frames.duration += currentFrame.duration;
          frames.push(currentFrame);
        }

        // start a new frame seeded with the AUD's timing information
        currentFrame = [currentNal];
        currentFrame.byteLength = currentNal.data.byteLength;
        currentFrame.pts = currentNal.pts;
        currentFrame.dts = currentNal.dts;
      } else {
        // Specifically flag key frames for ease of use later
        if (currentNal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
          currentFrame.keyFrame = true;
        }

        currentFrame.duration = currentNal.dts - currentFrame.dts;
        currentFrame.byteLength += currentNal.data.byteLength;
        currentFrame.push(currentNal);
      }
    } // For the last frame, use the duration of the previous frame if we
    // have nothing better to go on


    if (frames.length && (!currentFrame.duration || currentFrame.duration <= 0)) {
      currentFrame.duration = frames[frames.length - 1].duration;
    } // Push the final frame
    // TODO added for LHLS, make sure this is OK


    frames.byteLength += currentFrame.byteLength;
    frames.nalCount += currentFrame.length;
    frames.duration += currentFrame.duration;
    frames.push(currentFrame);
    return frames;
  }; // Convert an array of frames into an array of Gop with each Gop being composed
10721 // of the frames that make up that Gop
10722 // Also keep track of cummulative data about the Gop from the frames such as the
10723 // Gop duration, starting pts, etc.
10724
10725
  // Group frames into GOPs, splitting on key frames. Like
  // groupNalsIntoFrames, the returned array and each GOP carry running
  // totals (byteLength, nalCount, duration) plus pts/dts as extra
  // properties. Assumes `frames` is non-empty (frames[0] is read).
  var groupFramesIntoGops = function groupFramesIntoGops(frames) {
    var i,
        currentFrame,
        currentGop = [],
        gops = []; // We must pre-set some of the values on the Gop since we
    // keep running totals of these values

    currentGop.byteLength = 0;
    currentGop.nalCount = 0;
    currentGop.duration = 0;
    currentGop.pts = frames[0].pts;
    currentGop.dts = frames[0].dts; // store some metadata about all the Gops

    gops.byteLength = 0;
    gops.nalCount = 0;
    gops.duration = 0;
    gops.pts = frames[0].pts;
    gops.dts = frames[0].dts;

    for (i = 0; i < frames.length; i++) {
      currentFrame = frames[i];

      if (currentFrame.keyFrame) {
        // Since the very first frame is expected to be an keyframe
        // only push to the gops array when currentGop is not empty
        if (currentGop.length) {
          gops.push(currentGop);
          gops.byteLength += currentGop.byteLength;
          gops.nalCount += currentGop.nalCount;
          gops.duration += currentGop.duration;
        }

        // start a new GOP seeded with this keyframe's stats
        currentGop = [currentFrame];
        currentGop.nalCount = currentFrame.length;
        currentGop.byteLength = currentFrame.byteLength;
        currentGop.pts = currentFrame.pts;
        currentGop.dts = currentFrame.dts;
        currentGop.duration = currentFrame.duration;
      } else {
        // accumulate the non-key frame into the current GOP
        currentGop.duration += currentFrame.duration;
        currentGop.nalCount += currentFrame.length;
        currentGop.byteLength += currentFrame.byteLength;
        currentGop.push(currentFrame);
      }
    }

    // fall back to the previous GOP's duration when the last GOP has no
    // usable duration of its own
    if (gops.length && currentGop.duration <= 0) {
      currentGop.duration = gops[gops.length - 1].duration;
    }

    gops.byteLength += currentGop.byteLength;
    gops.nalCount += currentGop.nalCount;
    gops.duration += currentGop.duration; // push the final Gop

    gops.push(currentGop);
    return gops;
  };
10783 /*
10784 * Search for the first keyframe in the GOPs and throw away all frames
10785 * until that keyframe. Then extend the duration of the pulled keyframe
10786 * and pull the PTS and DTS of the keyframe so that it covers the time
10787 * range of the frames that were disposed.
10788 *
10789 * @param {Array} gops video GOPs
10790 * @returns {Array} modified video GOPs
10791 */
10792
10793
10794 var extendFirstKeyFrame = function extendFirstKeyFrame(gops) {
10795 var currentGop;
10796
10797 if (!gops[0][0].keyFrame && gops.length > 1) {
10798 // Remove the first GOP
10799 currentGop = gops.shift();
10800 gops.byteLength -= currentGop.byteLength;
10801 gops.nalCount -= currentGop.nalCount; // Extend the first frame of what is now the
10802 // first gop to cover the time period of the
10803 // frames we just removed
10804
10805 gops[0][0].dts = currentGop.dts;
10806 gops[0][0].pts = currentGop.pts;
10807 gops[0][0].duration += currentGop.duration;
10808 }
10809
10810 return gops;
10811 };
10812 /**
10813 * Default sample object
10814 * see ISO/IEC 14496-12:2012, section 8.6.4.3
10815 */
10816
10817
10818 var createDefaultSample = function createDefaultSample() {
10819 return {
10820 size: 0,
10821 flags: {
10822 isLeading: 0,
10823 dependsOn: 1,
10824 isDependedOn: 0,
10825 hasRedundancy: 0,
10826 degradationPriority: 0,
10827 isNonSyncSample: 1
10828 }
10829 };
10830 };
10831 /*
10832 * Collates information from a video frame into an object for eventual
10833 * entry into an MP4 sample table.
10834 *
10835 * @param {Object} frame the video frame
10836 * @param {Number} dataOffset the byte offset to position the sample
10837 * @return {Object} object containing sample table info for a frame
10838 */
10839
10840
10841 var sampleForFrame = function sampleForFrame(frame, dataOffset) {
10842 var sample = createDefaultSample();
10843 sample.dataOffset = dataOffset;
10844 sample.compositionTimeOffset = frame.pts - frame.dts;
10845 sample.duration = frame.duration;
10846 sample.size = 4 * frame.length; // Space for nal unit size
10847
10848 sample.size += frame.byteLength;
10849
10850 if (frame.keyFrame) {
10851 sample.flags.dependsOn = 2;
10852 sample.flags.isNonSyncSample = 0;
10853 }
10854
10855 return sample;
10856 }; // generate the track's sample table from an array of gops
10857
10858
10859 var generateSampleTable$1 = function generateSampleTable(gops, baseDataOffset) {
10860 var h,
10861 i,
10862 sample,
10863 currentGop,
10864 currentFrame,
10865 dataOffset = baseDataOffset || 0,
10866 samples = [];
10867
10868 for (h = 0; h < gops.length; h++) {
10869 currentGop = gops[h];
10870
10871 for (i = 0; i < currentGop.length; i++) {
10872 currentFrame = currentGop[i];
10873 sample = sampleForFrame(currentFrame, dataOffset);
10874 dataOffset += sample.size;
10875 samples.push(sample);
10876 }
10877 }
10878
10879 return samples;
10880 }; // generate the track's raw mdat data from an array of gops
10881
10882
10883 var concatenateNalData = function concatenateNalData(gops) {
10884 var h,
10885 i,
10886 j,
10887 currentGop,
10888 currentFrame,
10889 currentNal,
10890 dataOffset = 0,
10891 nalsByteLength = gops.byteLength,
10892 numberOfNals = gops.nalCount,
10893 totalByteLength = nalsByteLength + 4 * numberOfNals,
10894 data = new Uint8Array(totalByteLength),
10895 view = new DataView(data.buffer); // For each Gop..
10896
10897 for (h = 0; h < gops.length; h++) {
10898 currentGop = gops[h]; // For each Frame..
10899
10900 for (i = 0; i < currentGop.length; i++) {
10901 currentFrame = currentGop[i]; // For each NAL..
10902
10903 for (j = 0; j < currentFrame.length; j++) {
10904 currentNal = currentFrame[j];
10905 view.setUint32(dataOffset, currentNal.data.byteLength);
10906 dataOffset += 4;
10907 data.set(currentNal.data, dataOffset);
10908 dataOffset += currentNal.data.byteLength;
10909 }
10910 }
10911 }
10912
10913 return data;
10914 }; // generate the track's sample table from a frame
10915
10916
10917 var generateSampleTableForFrame = function generateSampleTableForFrame(frame, baseDataOffset) {
10918 var sample,
10919 dataOffset = baseDataOffset || 0,
10920 samples = [];
10921 sample = sampleForFrame(frame, dataOffset);
10922 samples.push(sample);
10923 return samples;
10924 }; // generate the track's raw mdat data from a frame
10925
10926
10927 var concatenateNalDataForFrame = function concatenateNalDataForFrame(frame) {
10928 var i,
10929 currentNal,
10930 dataOffset = 0,
10931 nalsByteLength = frame.byteLength,
10932 numberOfNals = frame.length,
10933 totalByteLength = nalsByteLength + 4 * numberOfNals,
10934 data = new Uint8Array(totalByteLength),
10935 view = new DataView(data.buffer); // For each NAL..
10936
10937 for (i = 0; i < frame.length; i++) {
10938 currentNal = frame[i];
10939 view.setUint32(dataOffset, currentNal.data.byteLength);
10940 dataOffset += 4;
10941 data.set(currentNal.data, dataOffset);
10942 dataOffset += currentNal.data.byteLength;
10943 }
10944
10945 return data;
10946 };
10947
  // Public interface of the H.264 frame/GOP utility module.
  var frameUtils = {
    groupNalsIntoFrames: groupNalsIntoFrames,
    groupFramesIntoGops: groupFramesIntoGops,
    extendFirstKeyFrame: extendFirstKeyFrame,
    generateSampleTable: generateSampleTable$1,
    concatenateNalData: concatenateNalData,
    generateSampleTableForFrame: generateSampleTableForFrame,
    concatenateNalDataForFrame: concatenateNalDataForFrame
  };
10957 /**
10958 * mux.js
10959 *
10960 * Copyright (c) Brightcove
10961 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
10962 */
10963
  // Byte prefixes shared by the pregenerated silent-AAC-frame tables
  // built in silence_1() below.
  var highPrefix = [33, 16, 5, 32, 164, 27];
  var lowPrefix = [33, 65, 108, 84, 1, 2, 4, 8, 168, 2, 4, 8, 17, 191, 252];
10966
10967 var zeroFill = function zeroFill(count) {
10968 var a = [];
10969
10970 while (count--) {
10971 a.push(0);
10972 }
10973
10974 return a;
10975 };
10976
10977 var makeTable = function makeTable(metaTable) {
10978 return Object.keys(metaTable).reduce(function (obj, key) {
10979 obj[key] = new Uint8Array(metaTable[key].reduce(function (arr, part) {
10980 return arr.concat(part);
10981 }, []));
10982 return obj;
10983 }, {});
10984 };
10985
  // Lazily-built table of pregenerated silent AAC frames keyed by
  // sample rate (see silence_1 below).
  var silence;

  // Return the table of frames-of-silence, building it on first use.
  // Keys are AAC sample rates; values are Uint8Array frames produced by
  // flattening the prefix/zero-fill parts through makeTable.
  var silence_1 = function silence_1() {
    if (!silence) {
      // Frames-of-silence to use for filling in missing AAC frames
      var coneOfSilence = {
        96000: [highPrefix, [227, 64], zeroFill(154), [56]],
        88200: [highPrefix, [231], zeroFill(170), [56]],
        64000: [highPrefix, [248, 192], zeroFill(240), [56]],
        48000: [highPrefix, [255, 192], zeroFill(268), [55, 148, 128], zeroFill(54), [112]],
        44100: [highPrefix, [255, 192], zeroFill(268), [55, 163, 128], zeroFill(84), [112]],
        32000: [highPrefix, [255, 192], zeroFill(268), [55, 234], zeroFill(226), [112]],
        24000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 112], zeroFill(126), [224]],
        16000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 255], zeroFill(269), [223, 108], zeroFill(195), [1, 192]],
        12000: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 253, 128], zeroFill(259), [56]],
        11025: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 255, 192], zeroFill(268), [55, 175, 128], zeroFill(108), [112]],
        8000: [lowPrefix, zeroFill(268), [3, 121, 16], zeroFill(47), [7]]
      };
      silence = makeTable(coneOfSilence);
    }

    return silence;
  };
11009 /**
11010 * mux.js
11011 *
11012 * Copyright (c) Brightcove
11013 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
11014 */
11015
11016
11017 var ONE_SECOND_IN_TS$4 = 90000,
11018 // 90kHz clock
11019 secondsToVideoTs,
11020 secondsToAudioTs,
11021 videoTsToSeconds,
11022 audioTsToSeconds,
11023 audioTsToVideoTs,
11024 videoTsToAudioTs,
11025 metadataTsToSeconds;
11026
11027 secondsToVideoTs = function secondsToVideoTs(seconds) {
11028 return seconds * ONE_SECOND_IN_TS$4;
11029 };
11030
11031 secondsToAudioTs = function secondsToAudioTs(seconds, sampleRate) {
11032 return seconds * sampleRate;
11033 };
11034
11035 videoTsToSeconds = function videoTsToSeconds(timestamp) {
11036 return timestamp / ONE_SECOND_IN_TS$4;
11037 };
11038
11039 audioTsToSeconds = function audioTsToSeconds(timestamp, sampleRate) {
11040 return timestamp / sampleRate;
11041 };
11042
11043 audioTsToVideoTs = function audioTsToVideoTs(timestamp, sampleRate) {
11044 return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
11045 };
11046
11047 videoTsToAudioTs = function videoTsToAudioTs(timestamp, sampleRate) {
11048 return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
11049 };
11050 /**
11051 * Adjust ID3 tag or caption timing information by the timeline pts values
11052 * (if keepOriginalTimestamps is false) and convert to seconds
11053 */
11054
11055
11056 metadataTsToSeconds = function metadataTsToSeconds(timestamp, timelineStartPts, keepOriginalTimestamps) {
11057 return videoTsToSeconds(keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts);
11058 };
11059
11060 var clock = {
11061 ONE_SECOND_IN_TS: ONE_SECOND_IN_TS$4,
11062 secondsToVideoTs: secondsToVideoTs,
11063 secondsToAudioTs: secondsToAudioTs,
11064 videoTsToSeconds: videoTsToSeconds,
11065 audioTsToSeconds: audioTsToSeconds,
11066 audioTsToVideoTs: audioTsToVideoTs,
11067 videoTsToAudioTs: videoTsToAudioTs,
11068 metadataTsToSeconds: metadataTsToSeconds
11069 };
11070 /**
11071 * mux.js
11072 *
11073 * Copyright (c) Brightcove
11074 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
11075 */
11076
11077 /**
11078 * Sum the `byteLength` properties of the data in each AAC frame
11079 */
11080
11081 var sumFrameByteLengths = function sumFrameByteLengths(array) {
11082 var i,
11083 currentObj,
11084 sum = 0; // sum the byteLength's all each nal unit in the frame
11085
11086 for (i = 0; i < array.length; i++) {
11087 currentObj = array[i];
11088 sum += currentObj.data.byteLength;
11089 }
11090
11091 return sum;
11092 }; // Possibly pad (prefix) the audio track with silence if appending this track
11093 // would lead to the introduction of a gap in the audio buffer
11094
11095
  // Prefix `frames` with silent AAC frames when appending this track
  // would otherwise introduce a gap in the audio buffer. Mutates both
  // `frames` (unshifts silent frames) and `track.baseMediaDecodeTime`
  // (pulled back by the fill duration). Returns the fill duration in
  // 90kHz ticks, or undefined when no fill was needed.
  var prefixWithSilence = function prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime) {
    var baseMediaDecodeTimeTs,
        frameDuration = 0,
        audioGapDuration = 0,
        audioFillFrameCount = 0,
        audioFillDuration = 0,
        silentFrame,
        i,
        firstFrame;

    if (!frames.length) {
      return;
    }

    baseMediaDecodeTimeTs = clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate); // determine frame clock duration based on sample rate, round up to avoid overfills

    // each AAC frame is 1024 samples long
    frameDuration = Math.ceil(clock.ONE_SECOND_IN_TS / (track.samplerate / 1024));

    if (audioAppendStartTs && videoBaseMediaDecodeTime) {
      // insert the shortest possible amount (audio gap or audio to video gap)
      audioGapDuration = baseMediaDecodeTimeTs - Math.max(audioAppendStartTs, videoBaseMediaDecodeTime); // number of full frames in the audio gap

      audioFillFrameCount = Math.floor(audioGapDuration / frameDuration);
      audioFillDuration = audioFillFrameCount * frameDuration;
    } // don't attempt to fill gaps smaller than a single frame or larger
    // than a half second


    if (audioFillFrameCount < 1 || audioFillDuration > clock.ONE_SECOND_IN_TS / 2) {
      return;
    }

    silentFrame = silence_1()[track.samplerate];

    if (!silentFrame) {
      // we don't have a silent frame pregenerated for the sample rate, so use a frame
      // from the content instead
      silentFrame = frames[0].data;
    }

    // unshift one silent frame per fill frame, each timed one frame
    // duration earlier than the current first frame
    for (i = 0; i < audioFillFrameCount; i++) {
      firstFrame = frames[0];
      frames.splice(0, 0, {
        data: silentFrame,
        dts: firstFrame.dts - frameDuration,
        pts: firstFrame.pts - frameDuration
      });
    }

    // pull baseMediaDecodeTime back (in the audio clock) to account for
    // the prepended silence
    track.baseMediaDecodeTime -= Math.floor(clock.videoTsToAudioTs(audioFillDuration, track.samplerate));
    return audioFillDuration;
  }; // If the audio segment extends before the earliest allowed dts
11148 // value, remove AAC frames until starts at or after the earliest
11149 // allowed DTS so that we don't end up with a negative baseMedia-
11150 // DecodeTime for the audio track
11151
11152
11153 var trimAdtsFramesByEarliestDts = function trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts) {
11154 if (track.minSegmentDts >= earliestAllowedDts) {
11155 return adtsFrames;
11156 } // We will need to recalculate the earliest segment Dts
11157
11158
11159 track.minSegmentDts = Infinity;
11160 return adtsFrames.filter(function (currentFrame) {
11161 // If this is an allowed frame, keep it and record it's Dts
11162 if (currentFrame.dts >= earliestAllowedDts) {
11163 track.minSegmentDts = Math.min(track.minSegmentDts, currentFrame.dts);
11164 track.minSegmentPts = track.minSegmentDts;
11165 return true;
11166 } // Otherwise, discard it
11167
11168
11169 return false;
11170 });
11171 }; // generate the track's raw mdat data from an array of frames
11172
11173
11174 var generateSampleTable = function generateSampleTable(frames) {
11175 var i,
11176 currentFrame,
11177 samples = [];
11178
11179 for (i = 0; i < frames.length; i++) {
11180 currentFrame = frames[i];
11181 samples.push({
11182 size: currentFrame.data.byteLength,
11183 duration: 1024 // For AAC audio, all samples contain 1024 samples
11184
11185 });
11186 }
11187
11188 return samples;
11189 }; // generate the track's sample table from an array of frames
11190
11191
11192 var concatenateFrameData = function concatenateFrameData(frames) {
11193 var i,
11194 currentFrame,
11195 dataOffset = 0,
11196 data = new Uint8Array(sumFrameByteLengths(frames));
11197
11198 for (i = 0; i < frames.length; i++) {
11199 currentFrame = frames[i];
11200 data.set(currentFrame.data, dataOffset);
11201 dataOffset += currentFrame.data.byteLength;
11202 }
11203
11204 return data;
11205 };
11206
  // Public interface of the AAC/ADTS frame utility module.
  var audioFrameUtils = {
    prefixWithSilence: prefixWithSilence,
    trimAdtsFramesByEarliestDts: trimAdtsFramesByEarliestDts,
    generateSampleTable: generateSampleTable,
    concatenateFrameData: concatenateFrameData
  };
11213 /**
11214 * mux.js
11215 *
11216 * Copyright (c) Brightcove
11217 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
11218 */
11219
11220 var ONE_SECOND_IN_TS$3 = clock.ONE_SECOND_IN_TS;
11221 /**
11222 * Store information about the start and end of the track and the
11223 * duration for each frame/sample we process in order to calculate
11224 * the baseMediaDecodeTime
11225 */
11226
11227 var collectDtsInfo = function collectDtsInfo(track, data) {
11228 if (typeof data.pts === 'number') {
11229 if (track.timelineStartInfo.pts === undefined) {
11230 track.timelineStartInfo.pts = data.pts;
11231 }
11232
11233 if (track.minSegmentPts === undefined) {
11234 track.minSegmentPts = data.pts;
11235 } else {
11236 track.minSegmentPts = Math.min(track.minSegmentPts, data.pts);
11237 }
11238
11239 if (track.maxSegmentPts === undefined) {
11240 track.maxSegmentPts = data.pts;
11241 } else {
11242 track.maxSegmentPts = Math.max(track.maxSegmentPts, data.pts);
11243 }
11244 }
11245
11246 if (typeof data.dts === 'number') {
11247 if (track.timelineStartInfo.dts === undefined) {
11248 track.timelineStartInfo.dts = data.dts;
11249 }
11250
11251 if (track.minSegmentDts === undefined) {
11252 track.minSegmentDts = data.dts;
11253 } else {
11254 track.minSegmentDts = Math.min(track.minSegmentDts, data.dts);
11255 }
11256
11257 if (track.maxSegmentDts === undefined) {
11258 track.maxSegmentDts = data.dts;
11259 } else {
11260 track.maxSegmentDts = Math.max(track.maxSegmentDts, data.dts);
11261 }
11262 }
11263 };
11264 /**
11265 * Clear values used to calculate the baseMediaDecodeTime between
11266 * tracks
11267 */
11268
11269
11270 var clearDtsInfo = function clearDtsInfo(track) {
11271 delete track.minSegmentDts;
11272 delete track.maxSegmentDts;
11273 delete track.minSegmentPts;
11274 delete track.maxSegmentPts;
11275 };
11276 /**
11277 * Calculate the track's baseMediaDecodeTime based on the earliest
11278 * DTS the transmuxer has ever seen and the minimum DTS for the
11279 * current track
11280 * @param track {object} track metadata configuration
11281 * @param keepOriginalTimestamps {boolean} If true, keep the timestamps
11282 * in the source; false to adjust the first segment to start at 0.
11283 */
11284
11285
11286 var calculateTrackBaseMediaDecodeTime = function calculateTrackBaseMediaDecodeTime(track, keepOriginalTimestamps) {
11287 var baseMediaDecodeTime,
11288 scale,
11289 minSegmentDts = track.minSegmentDts; // Optionally adjust the time so the first segment starts at zero.
11290
11291 if (!keepOriginalTimestamps) {
11292 minSegmentDts -= track.timelineStartInfo.dts;
11293 } // track.timelineStartInfo.baseMediaDecodeTime is the location, in time, where
11294 // we want the start of the first segment to be placed
11295
11296
11297 baseMediaDecodeTime = track.timelineStartInfo.baseMediaDecodeTime; // Add to that the distance this segment is from the very first
11298
11299 baseMediaDecodeTime += minSegmentDts; // baseMediaDecodeTime must not become negative
11300
11301 baseMediaDecodeTime = Math.max(0, baseMediaDecodeTime);
11302
11303 if (track.type === 'audio') {
11304 // Audio has a different clock equal to the sampling_rate so we need to
11305 // scale the PTS values into the clock rate of the track
11306 scale = track.samplerate / ONE_SECOND_IN_TS$3;
11307 baseMediaDecodeTime *= scale;
11308 baseMediaDecodeTime = Math.floor(baseMediaDecodeTime);
11309 }
11310
11311 return baseMediaDecodeTime;
11312 };
11313
  // Public interface of the track decode-time bookkeeping module.
  var trackDecodeInfo = {
    clearDtsInfo: clearDtsInfo,
    calculateTrackBaseMediaDecodeTime: calculateTrackBaseMediaDecodeTime,
    collectDtsInfo: collectDtsInfo
  };
11319 /**
11320 * mux.js
11321 *
11322 * Copyright (c) Brightcove
11323 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
11324 *
11325 * Reads in-band caption information from a video elementary
11326 * stream. Captions must follow the CEA-708 standard for injection
11327 * into an MPEG-2 transport streams.
11328 * @see https://en.wikipedia.org/wiki/CEA-708
11329 * @see https://www.gpo.gov/fdsys/pkg/CFR-2007-title47-vol1/pdf/CFR-2007-title47-vol1-sec15-119.pdf
11330 */
11331 // payload type field to indicate how they are to be
11332 // interpreted. CEAS-708 caption content is always transmitted with
11333 // payload type 0x04.
11334
  // SEI payload type 4 identifies registered ITU-T T.35 user data (the
  // carrier for 608/708 captions); 0x80 is the RBSP trailing-bits
  // marker that terminates SEI parsing.
  var USER_DATA_REGISTERED_ITU_T_T35 = 4,
      RBSP_TRAILING_BITS = 128;
11337 /**
11338 * Parse a supplemental enhancement information (SEI) NAL unit.
11339 * Stops parsing once a message of type ITU T T35 has been found.
11340 *
11341 * @param bytes {Uint8Array} the bytes of a SEI NAL unit
11342 * @return {object} the parsed SEI payload
11343 * @see Rec. ITU-T H.264, 7.3.2.3.1
11344 */
11345
  var parseSei = function parseSei(bytes) {
    var i = 0,
        result = {
      payloadType: -1,
      payloadSize: 0
    },
        payloadType = 0,
        payloadSize = 0; // go through the sei_rbsp parsing each each individual sei_message

    while (i < bytes.byteLength) {
      // stop once we have hit the end of the sei_rbsp
      if (bytes[i] === RBSP_TRAILING_BITS) {
        break;
      } // Parse payload type


      // ff_byte runs encode values >= 255 by accumulation
      while (bytes[i] === 0xFF) {
        payloadType += 255;
        i++;
      }

      payloadType += bytes[i++]; // Parse payload size

      while (bytes[i] === 0xFF) {
        payloadSize += 255;
        i++;
      }

      payloadSize += bytes[i++]; // this sei_message is a 608/708 caption so save it and break
      // there can only ever be one caption message in a frame's sei

      if (!result.payload && payloadType === USER_DATA_REGISTERED_ITU_T_T35) {
        // bytes 3-6 of the T.35 payload hold the ATSC user_identifier
        var userIdentifier = String.fromCharCode(bytes[i + 3], bytes[i + 4], bytes[i + 5], bytes[i + 6]);

        if (userIdentifier === 'GA94') {
          result.payloadType = payloadType;
          result.payloadSize = payloadSize;
          result.payload = bytes.subarray(i, i + payloadSize);
          break;
        } else {
          // not ATSC1 caption data; discard and keep scanning
          result.payload = void 0;
        }
      } // skip the payload and parse the next message


      i += payloadSize;
      payloadType = 0;
      payloadSize = 0;
    }

    return result;
  }; // see ANSI/SCTE 128-1 (2013), section 8.1
11398
11399
11400 var parseUserData = function parseUserData(sei) {
11401 // itu_t_t35_contry_code must be 181 (United States) for
11402 // captions
11403 if (sei.payload[0] !== 181) {
11404 return null;
11405 } // itu_t_t35_provider_code should be 49 (ATSC) for captions
11406
11407
11408 if ((sei.payload[1] << 8 | sei.payload[2]) !== 49) {
11409 return null;
11410 } // the user_identifier should be "GA94" to indicate ATSC1 data
11411
11412
11413 if (String.fromCharCode(sei.payload[3], sei.payload[4], sei.payload[5], sei.payload[6]) !== 'GA94') {
11414 return null;
11415 } // finally, user_data_type_code should be 0x03 for caption data
11416
11417
11418 if (sei.payload[7] !== 0x03) {
11419 return null;
11420 } // return the user_data_type_structure and strip the trailing
11421 // marker bits
11422
11423
11424 return sei.payload.subarray(8, sei.payload.length - 1);
11425 }; // see CEA-708-D, section 4.4
11426
11427
11428 var parseCaptionPackets = function parseCaptionPackets(pts, userData) {
11429 var results = [],
11430 i,
11431 count,
11432 offset,
11433 data; // if this is just filler, return immediately
11434
11435 if (!(userData[0] & 0x40)) {
11436 return results;
11437 } // parse out the cc_data_1 and cc_data_2 fields
11438
11439
11440 count = userData[0] & 0x1f;
11441
11442 for (i = 0; i < count; i++) {
11443 offset = i * 3;
11444 data = {
11445 type: userData[offset + 2] & 0x03,
11446 pts: pts
11447 }; // capture cc data when cc_valid is 1
11448
11449 if (userData[offset + 2] & 0x04) {
11450 data.ccData = userData[offset + 3] << 8 | userData[offset + 4];
11451 results.push(data);
11452 }
11453 }
11454
11455 return results;
11456 };
11457
11458 var discardEmulationPreventionBytes$1 = function discardEmulationPreventionBytes(data) {
11459 var length = data.byteLength,
11460 emulationPreventionBytesPositions = [],
11461 i = 1,
11462 newLength,
11463 newData; // Find all `Emulation Prevention Bytes`
11464
11465 while (i < length - 2) {
11466 if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
11467 emulationPreventionBytesPositions.push(i + 2);
11468 i += 2;
11469 } else {
11470 i++;
11471 }
11472 } // If no Emulation Prevention Bytes were found just return the original
11473 // array
11474
11475
11476 if (emulationPreventionBytesPositions.length === 0) {
11477 return data;
11478 } // Create a new array to hold the NAL unit data
11479
11480
11481 newLength = length - emulationPreventionBytesPositions.length;
11482 newData = new Uint8Array(newLength);
11483 var sourceIndex = 0;
11484
11485 for (i = 0; i < newLength; sourceIndex++, i++) {
11486 if (sourceIndex === emulationPreventionBytesPositions[0]) {
11487 // Skip this byte
11488 sourceIndex++; // Remove this position index
11489
11490 emulationPreventionBytesPositions.shift();
11491 }
11492
11493 newData[i] = data[sourceIndex];
11494 }
11495
11496 return newData;
11497 }; // exports
11498
11499
  // Public interface of the SEI caption-packet parsing module.
  var captionPacketParser = {
    parseSei: parseSei,
    parseUserData: parseUserData,
    parseCaptionPackets: parseCaptionPackets,
    discardEmulationPreventionBytes: discardEmulationPreventionBytes$1,
    USER_DATA_REGISTERED_ITU_T_T35: USER_DATA_REGISTERED_ITU_T_T35
  }; // Link To Transport
11507 // -----------------
11508
  // Stream that extracts CEA-608 (and optionally CEA-708) captions from
  // SEI NAL units. Fans incoming caption packets out to four 608
  // channel streams and, when enabled, one 708 stream, re-emitting
  // their data/partialdone/done events.
  var CaptionStream$1 = function CaptionStream(options) {
    options = options || {};
    CaptionStream.prototype.init.call(this); // parse708captions flag, default to true

    this.parse708captions_ = typeof options.parse708captions === 'boolean' ? options.parse708captions : true;
    this.captionPackets_ = [];
    // one Cea608Stream per field/channel combination
    this.ccStreams_ = [new Cea608Stream(0, 0), // eslint-disable-line no-use-before-define
    new Cea608Stream(0, 1), // eslint-disable-line no-use-before-define
    new Cea608Stream(1, 0), // eslint-disable-line no-use-before-define
    new Cea608Stream(1, 1) // eslint-disable-line no-use-before-define
    ];

    if (this.parse708captions_) {
      this.cc708Stream_ = new Cea708Stream({
        captionServices: options.captionServices
      }); // eslint-disable-line no-use-before-define
    }

    this.reset(); // forward data and done events from CCs to this CaptionStream

    this.ccStreams_.forEach(function (cc) {
      cc.on('data', this.trigger.bind(this, 'data'));
      cc.on('partialdone', this.trigger.bind(this, 'partialdone'));
      cc.on('done', this.trigger.bind(this, 'done'));
    }, this);

    if (this.parse708captions_) {
      this.cc708Stream_.on('data', this.trigger.bind(this, 'data'));
      this.cc708Stream_.on('partialdone', this.trigger.bind(this, 'partialdone'));
      this.cc708Stream_.on('done', this.trigger.bind(this, 'done'));
    }
  };
11541
  // inherit the event-emitter behavior (on/off/trigger/pipe) from stream
  CaptionStream$1.prototype = new stream();
11543
  // Examine one NAL-unit event: parse its SEI payload, extract any
  // CEA-608/708 caption packets, and queue them in captionPackets_ for
  // a later flush. Duplicate segments are dropped via dts tracking.
  CaptionStream$1.prototype.push = function (event) {
    var sei, userData, newCaptionPackets; // only examine SEI NALs

    if (event.nalUnitType !== 'sei_rbsp') {
      return;
    } // parse the sei


    sei = captionPacketParser.parseSei(event.escapedRBSP); // no payload data, skip

    if (!sei.payload) {
      return;
    } // ignore everything but user_data_registered_itu_t_t35


    if (sei.payloadType !== captionPacketParser.USER_DATA_REGISTERED_ITU_T_T35) {
      return;
    } // parse out the user data payload


    userData = captionPacketParser.parseUserData(sei); // ignore unrecognized userData

    if (!userData) {
      return;
    } // Sometimes, the same segment # will be downloaded twice. To stop the
    // caption data from being processed twice, we track the latest dts we've
    // received and ignore everything with a dts before that. However, since
    // data for a specific dts can be split across packets on either side of
    // a segment boundary, we need to make sure we *don't* ignore the packets
    // from the *next* segment that have dts === this.latestDts_. By constantly
    // tracking the number of packets received with dts === this.latestDts_, we
    // know how many should be ignored once we start receiving duplicates.


    if (event.dts < this.latestDts_) {
      // We've started getting older data, so set the flag.
      this.ignoreNextEqualDts_ = true;
      return;
    } else if (event.dts === this.latestDts_ && this.ignoreNextEqualDts_) {
      this.numSameDts_--;

      if (!this.numSameDts_) {
        // We've received the last duplicate packet, time to start processing again
        this.ignoreNextEqualDts_ = false;
      }

      return;
    } // parse out CC data packets and save them for later


    newCaptionPackets = captionPacketParser.parseCaptionPackets(event.pts, userData);
    this.captionPackets_ = this.captionPackets_.concat(newCaptionPackets);

    // count how many consecutive packets share the current dts so the
    // same number can be skipped if the segment is re-pushed
    if (this.latestDts_ !== event.dts) {
      this.numSameDts_ = 0;
    }

    this.numSameDts_++;
    this.latestDts_ = event.dts;
  };
11604
// Flush (or partially flush) every underlying CEA-608 stream.
CaptionStream$1.prototype.flushCCStreams = function (flushType) {
  var doFullFlush = flushType === 'flush';
  this.ccStreams_.forEach(function (ccStream) {
    if (doFullFlush) {
      ccStream.flush();
    } else {
      ccStream.partialFlush();
    }
  });
};
11610
// Sort all buffered caption packets by PTS and dispatch them to the
// appropriate 608/708 streams, then flush those streams.
CaptionStream$1.prototype.flushStream = function (flushType) {
  // make sure we actually parsed captions before proceeding
  if (!this.captionPackets_.length) {
    this.flushCCStreams(flushType);
    return;
  }

  var packets = this.captionPackets_;

  // In Chrome, the Array#sort function is not stable so add a presortIndex
  // that we can use to ensure we get a stable-sort
  for (var idx = 0; idx < packets.length; idx++) {
    packets[idx].presortIndex = idx;
  }

  // sort caption byte-pairs based on their PTS values; ties fall back to
  // arrival order via presortIndex
  packets.sort(function (left, right) {
    if (left.pts === right.pts) {
      return left.presortIndex - right.presortIndex;
    }

    return left.pts - right.pts;
  });

  for (var j = 0; j < packets.length; j++) {
    var packet = packets[j];

    if (packet.type < 2) {
      // Dispatch packet to the right Cea608Stream
      this.dispatchCea608Packet(packet);
    } else {
      // Dispatch packet to the Cea708Stream
      this.dispatchCea708Packet(packet);
    }
  }

  packets.length = 0;
  this.flushCCStreams(flushType);
};
11643
// Fully flush all buffered caption data downstream.
CaptionStream$1.prototype.flush = function () {
  return this.flushStream('flush');
}; // Only called if handling partial data
11647
11648
// Partial-data variant of flush(); only called when handling partial data.
CaptionStream$1.prototype.partialFlush = function () {
  return this.flushStream('partialFlush');
};
11652
// Reset dedup-tracking state, deactivate both 608 data channels, and reset
// every underlying CEA-608 stream.
CaptionStream$1.prototype.reset = function () {
  this.latestDts_ = null;
  this.ignoreNextEqualDts_ = false;
  this.numSameDts_ = 0;
  // No data channel is active on either field until a control code selects one.
  this.activeCea608Channel_ = [null, null];

  for (var idx = 0; idx < this.ccStreams_.length; idx++) {
    this.ccStreams_[idx].reset();
  }
}; // From the CEA-608 spec:
11662
11663 /*
11664 * When XDS sub-packets are interleaved with other services, the end of each sub-packet shall be followed
11665 * by a control pair to change to a different service. When any of the control codes from 0x10 to 0x1F is
11666 * used to begin a control code pair, it indicates the return to captioning or Text data. The control code pair
11667 * and subsequent data should then be processed according to the FCC rules. It may be necessary for the
11668 * line 21 data encoder to automatically insert a control code pair (i.e. RCL, RU2, RU3, RU4, RDC, or RTD)
11669 * to switch to captioning or Text.
11670 */
11671 // With that in mind, we ignore any data between an XDS control code and a
11672 // subsequent closed-captioning control code.
11673
11674
// Route a CEA-608 byte pair to the Cea608Stream for its field/channel, or
// drop it when no channel is active (or Text/XDS data is in progress).
CaptionStream$1.prototype.dispatchCea608Packet = function (packet) {
  // NOTE: packet.type is the CEA608 field
  var field = packet.type;

  if (this.setsTextOrXDSActive(packet)) {
    this.activeCea608Channel_[field] = null;
  } else if (this.setsChannel1Active(packet)) {
    this.activeCea608Channel_[field] = 0;
  } else if (this.setsChannel2Active(packet)) {
    this.activeCea608Channel_[field] = 1;
  }

  var channel = this.activeCea608Channel_[field];

  if (channel === null) {
    // If we haven't received anything to set the active channel, or the
    // packets are Text/XDS data, discard the data; we don't want jumbled
    // captions
    return;
  }

  // Streams are laid out as [f0c0, f0c1, f1c0, f1c1].
  this.ccStreams_[(field << 1) + channel].push(packet);
};
11694
// True when this byte pair is a control code selecting data channel 1.
CaptionStream$1.prototype.setsChannel1Active = function (packet) {
  var masked = packet.ccData & 0x7800;
  return masked === 0x1000;
};
11698
// True when this byte pair is a control code selecting data channel 2.
CaptionStream$1.prototype.setsChannel2Active = function (packet) {
  var masked = packet.ccData & 0x7800;
  return masked === 0x1800;
};
11702
// True when this byte pair switches the channel into Text or XDS mode (see
// the CEA-608 note above); matching packets deactivate captioning until a
// caption control code is seen. The masks appear to match XDS control codes
// (first test) and Text-mode control pairs on either channel -- TODO confirm
// against the CEA-608 control-code tables.
CaptionStream$1.prototype.setsTextOrXDSActive = function (packet) {
  return (packet.ccData & 0x7100) === 0x0100 || (packet.ccData & 0x78fe) === 0x102a || (packet.ccData & 0x78fe) === 0x182a;
};
11706
// Forward a CEA-708 packet to the 708 stream, but only when 708 parsing was
// enabled via options.
CaptionStream$1.prototype.dispatchCea708Packet = function (packet) {
  if (!this.parse708captions_) {
    return;
  }

  this.cc708Stream_.push(packet);
}; // ----------------------
11712 // Session to Application
11713 // ----------------------
11714 // This hash maps special and extended character codes to their
11715 // proper Unicode equivalent. The first one-byte key is just a
11716 // non-standard character code. The two-byte keys that follow are
11717 // the extended CEA708 character codes, along with the preceding
11718 // 0x10 extended character byte to distinguish these codes from
11719 // non-extended character codes. Every CEA708 character code that
11720 // is not in this object maps directly to a standard unicode
11721 // character code.
11722 // The transparent space and non-breaking transparent space are
11723 // technically not fully supported since there is no code to
11724 // make them transparent, so they have normal non-transparent
11725 // stand-ins.
11726 // The special closed caption (CC) character isn't a standard
11727 // unicode character, so a fairly similar unicode character was
11728 // chosen in it's place.
11729
11730
// Maps special and extended CEA-708 character codes to their proper Unicode
// equivalents. Single-byte keys are non-standard character codes; two-byte
// keys (0x10xx) are the extended character codes with the preceding 0x10
// extended-character byte folded in to distinguish them from non-extended
// codes. Every code not listed here maps directly to standard Unicode.
// The transparent spaces have non-transparent stand-ins, and the closed
// caption (CC) symbol uses a visually similar character, since exact
// equivalents don't exist in Unicode.
var CHARACTER_TRANSLATION_708 = {
  0x7f: 0x266a,   // ♪
  0x1020: 0x20,   // Transparent Space
  0x1021: 0xa0,   // Non-breaking Transparent Space
  0x1025: 0x2026, // …
  0x102a: 0x0160, // Š
  0x102c: 0x0152, // Œ
  0x1030: 0x2588, // █
  0x1031: 0x2018, // ‘
  0x1032: 0x2019, // ’
  0x1033: 0x201c, // “
  0x1034: 0x201d, // ”
  0x1035: 0x2022, // •
  0x1039: 0x2122, // ™
  0x103a: 0x0161, // š
  0x103c: 0x0153, // œ
  0x103d: 0x2120, // ℠
  0x103f: 0x0178, // Ÿ
  0x1076: 0x215b, // ⅛
  0x1077: 0x215c, // ⅜
  0x1078: 0x215d, // ⅝
  0x1079: 0x215e, // ⅞
  0x107a: 0x23d0, // ⏐
  0x107b: 0x23a4, // ⎤
  0x107c: 0x23a3, // ⎣
  0x107d: 0x23af, // ⎯
  0x107e: 0x23a6, // ⎦
  0x107f: 0x23a1, // ⎡
  0x10a0: 0x3138  // ㄸ (CC char)
};
11789
// Translate a (possibly extended) CEA-708 character code to a one-character
// string. Extended codes (0x1000 bit set) with no table entry are invalid
// and yield the empty string.
var get708CharFromCode = function get708CharFromCode(code) {
  var mapped = CHARACTER_TRANSLATION_708[code];

  if (mapped !== undefined) {
    return String.fromCharCode(mapped);
  }

  if (code & 0x1000) {
    // Invalid extended code
    return '';
  }

  return String.fromCharCode(code);
};
11800
// True when byte b falls in one of the printable CEA-708 text ranges:
// 0x20-0x7f (G0) or 0xa0-0xff (G1).
var within708TextBlock = function within708TextBlock(b) {
  return (b >= 0x20 && b <= 0x7f) || (b >= 0xa0 && b <= 0xff);
};
11804
// A single CEA-708 caption window: a small buffer of text rows plus the
// window/pen state parsed from DF#/SWA/SPA/SPC/SPL commands.
var Cea708Window = function Cea708Window(windowNum) {
  this.windowNum = windowNum;
  this.reset();
};

// Restore the window to a clean, empty state. The defaults below are
// arbitrary placeholders; defineWindow will usually override them.
Cea708Window.prototype.reset = function () {
  this.clearText();
  this.pendingNewLine = false;
  this.winAttr = {};
  this.penAttr = {};
  this.penLoc = {};
  this.penColor = {};
  this.visible = 0;
  this.rowLock = 0;
  this.columnLock = 0;
  this.priority = 0;
  this.relativePositioning = 0;
  this.anchorVertical = 0;
  this.anchorHorizontal = 0;
  this.anchorPoint = 0;
  this.rowCount = 1;
  this.virtualRowCount = this.rowCount + 1;
  this.columnCount = 41;
  this.windowStyle = 0;
  this.penStyle = 0;
};

// Join all rows into the window's displayable text.
Cea708Window.prototype.getText = function () {
  return this.rows.join('\n');
};

// Drop all buffered text, leaving a single empty row.
Cea708Window.prototype.clearText = function () {
  this.rows = [''];
  this.rowIdx = 0;
};

// Start a new row, invoking the overflow hook first when the window is
// already at capacity, then dropping rows from the top to stay within the
// virtual row count (there is no visible scrolling).
Cea708Window.prototype.newLine = function (pts) {
  var atCapacity = this.rows.length >= this.virtualRowCount;

  if (atCapacity && typeof this.beforeRowOverflow === 'function') {
    this.beforeRowOverflow(pts);
  }

  if (this.rows.length > 0) {
    this.rows.push('');
    this.rowIdx++;
  }

  while (this.rows.length > this.virtualRowCount) {
    this.rows.shift();
    this.rowIdx--;
  }
};

// True when the window holds no text at all.
Cea708Window.prototype.isEmpty = function () {
  switch (this.rows.length) {
    case 0:
      return true;
    case 1:
      return this.rows[0] === '';
    default:
      return false;
  }
};

// Append text to the current row.
Cea708Window.prototype.addText = function (text) {
  this.rows[this.rowIdx] += text;
};

// Remove the last character of the current row, if any text exists.
Cea708Window.prototype.backspace = function () {
  if (this.isEmpty()) {
    return;
  }

  var currentRow = this.rows[this.rowIdx];
  this.rows[this.rowIdx] = currentRow.slice(0, -1);
};
11880
// One CEA-708 caption service: owns eight windows plus the text buffer and
// PTS bookkeeping used when captions are emitted.
var Cea708Service = function Cea708Service(serviceNum, encoding, stream) {
  this.serviceNum = serviceNum;
  this.text = '';
  this.currentWindow = new Cea708Window(-1);
  this.windows = [];
  this.stream = stream;

  // Try to setup a TextDecoder if an `encoding` value was provided
  if (typeof encoding === 'string') {
    this.createTextDecoder(encoding);
  }
};
/**
 * Initialize service windows
 * Must be run before service use
 *
 * @param {Integer} pts PTS value
 * @param {Function} beforeRowOverflow Function to execute before row overflow of a window
 */


Cea708Service.prototype.init = function (pts, beforeRowOverflow) {
  this.startPts = pts;
  var hasOverflowHandler = typeof beforeRowOverflow === 'function';

  for (var winId = 0; winId < 8; winId++) {
    this.windows[winId] = new Cea708Window(winId);

    if (hasOverflowHandler) {
      this.windows[winId].beforeRowOverflow = beforeRowOverflow;
    }
  }
};
/**
 * Set current window of service to be affected by commands
 *
 * @param {Integer} windowNum Window number
 */


Cea708Service.prototype.setCurrentWindow = function (windowNum) {
  this.currentWindow = this.windows[windowNum];
};
/**
 * Try to create a TextDecoder if it is natively supported; otherwise (or on
 * an invalid encoding) log a warning through the owning stream.
 */


Cea708Service.prototype.createTextDecoder = function (encoding) {
  if (typeof TextDecoder === 'undefined') {
    this.stream.trigger('log', {
      level: 'warn',
      message: 'The `encoding` option is unsupported without TextDecoder support'
    });
    return;
  }

  try {
    this.textDecoder_ = new TextDecoder(encoding);
  } catch (error) {
    this.stream.trigger('log', {
      level: 'warn',
      message: 'TextDecoder could not be created with ' + encoding + ' encoding. ' + error
    });
  }
};
11944
// Stream that assembles CEA-708 byte pairs into DTVCC packets, parses them
// into service blocks, and emits caption text via 'data' events.
var Cea708Stream = function Cea708Stream(options) {
  options = options || {};
  Cea708Stream.prototype.init.call(this);
  var self = this;
  var captionServices = options.captionServices || {};
  var captionServiceEncodings = {};

  // Collect per-service text encodings from the captionServices option block.
  Object.keys(captionServices).forEach(function (serviceName) {
    if (/^SERVICE/.test(serviceName)) {
      captionServiceEncodings[serviceName] = captionServices[serviceName].encoding;
    }
  });

  this.serviceEncodings = captionServiceEncodings;
  this.current708Packet = null;
  this.services = {};

  this.push = function (packet) {
    if (packet.type === 3) {
      // 708 packet start: close out the previous packet, begin a new one.
      self.new708Packet();
      self.add708Bytes(packet);
      return;
    }

    if (self.current708Packet === null) {
      // This should only happen at the start of a file if there's no packet start.
      self.new708Packet();
    }

    self.add708Bytes(packet);
  };
};
11979
// Inherit the event-emitter interface from the shared Stream base class.
Cea708Stream.prototype = new stream();
11981 /**
11982 * Push current 708 packet, create new 708 packet.
11983 */
11984
// Push the current 708 packet downstream (if one exists) and start an empty
// replacement packet.
Cea708Stream.prototype.new708Packet = function () {
  if (this.current708Packet !== null) {
    this.push708Packet();
  }

  this.current708Packet = {
    data: [],
    ptsVals: []
  };
};
11995 /**
11996 * Add pts and both bytes from packet into current 708 packet.
11997 */
11998
11999
// Record the PTS and both bytes of a cc_data pair into the current packet.
// Bytes are stored individually (rather than as pairs) because the spec does
// not make clear that service blocks always align with byte-pair boundaries.
Cea708Stream.prototype.add708Bytes = function (packet) {
  var ccData = packet.ccData;
  var highByte = ccData >>> 8;
  var lowByte = ccData & 0xff;
  this.current708Packet.ptsVals.push(packet.pts);
  this.current708Packet.data.push(highByte, lowByte);
};
12010 /**
12011 * Parse completed 708 packet into service blocks and push each service block.
12012 */
12013
12014
// Parse the completed 708 packet into service blocks and push each one.
Cea708Stream.prototype.push708Packet = function () {
  var packet708 = this.current708Packet;
  var packetData = packet708.data;
  var serviceNum = null;
  var blockSize = null;
  var i = 0;
  // First byte: 2-bit sequence number and 6-bit size code.
  var b = packetData[i++];
  packet708.seq = b >> 6;
  packet708.sizeCode = b & 0x3f; // 0b00111111;

  // NOTE: i advances twice per iteration -- once by the i++ reads inside the
  // body and once by the loop increment -- stepping over each service-block
  // header and its payload.
  for (; i < packetData.length; i++) {
    // Service-block header: 3-bit service number, 5-bit block size.
    b = packetData[i++];
    serviceNum = b >> 5;
    blockSize = b & 0x1f; // 0b00011111

    if (serviceNum === 7 && blockSize > 0) {
      // Extended service num
      b = packetData[i++];
      serviceNum = b;
    }

    this.pushServiceBlock(serviceNum, i, blockSize);

    if (blockSize > 0) {
      i += blockSize - 1;
    }
  }
};
12043 /**
12044 * Parse service block, execute commands, read text.
12045 *
12046 * Note: While many of these commands serve important purposes,
12047 * many others just parse out the parameters or attributes, but
12048 * nothing is done with them because this is not a full and complete
12049 * implementation of the entire 708 spec.
12050 *
12051 * @param {Integer} serviceNum Service number
12052 * @param {Integer} start Start index of the 708 packet data
12053 * @param {Integer} size Block size
12054 */
12055
12056
// Parse one service block: execute its commands and accumulate its text.
// Many commands only parse out parameters/attributes without acting on them,
// since this is not a full implementation of the entire 708 spec.
Cea708Stream.prototype.pushServiceBlock = function (serviceNum, start, size) {
  var b;
  var i = start;
  var packetData = this.current708Packet.data;
  var service = this.services[serviceNum];

  // Lazily create the service the first time its number is seen.
  if (!service) {
    service = this.initService(serviceNum, i);
  }

  // Walk the block byte by byte; command handlers return the index of the
  // last byte they consumed, so multi-byte commands advance i here.
  for (; i < start + size && i < packetData.length; i++) {
    b = packetData[i];

    if (within708TextBlock(b)) {
      // Printable character in the G0/G1 range.
      i = this.handleText(i, service);
    } else if (b === 0x18) {
      // P16: two-byte character.
      i = this.multiByteCharacter(i, service);
    } else if (b === 0x10) {
      // EXT1: extended command/character escape.
      i = this.extendedCommands(i, service);
    } else if (0x80 <= b && b <= 0x87) {
      // CW0-CW7: set current window.
      i = this.setCurrentWindow(i, service);
    } else if (0x98 <= b && b <= 0x9f) {
      // DF0-DF7: define window.
      i = this.defineWindow(i, service);
    } else if (b === 0x88) {
      // CLW: clear windows.
      i = this.clearWindows(i, service);
    } else if (b === 0x8c) {
      // DLW: delete windows.
      i = this.deleteWindows(i, service);
    } else if (b === 0x89) {
      // DSW: display windows.
      i = this.displayWindows(i, service);
    } else if (b === 0x8a) {
      // HDW: hide windows.
      i = this.hideWindows(i, service);
    } else if (b === 0x8b) {
      // TGW: toggle windows.
      i = this.toggleWindows(i, service);
    } else if (b === 0x97) {
      // SWA: set window attributes.
      i = this.setWindowAttributes(i, service);
    } else if (b === 0x90) {
      // SPA: set pen attributes.
      i = this.setPenAttributes(i, service);
    } else if (b === 0x91) {
      // SPC: set pen color.
      i = this.setPenColor(i, service);
    } else if (b === 0x92) {
      // SPL: set pen location.
      i = this.setPenLocation(i, service);
    } else if (b === 0x8f) {
      // RST: reset the whole service.
      service = this.reset(i, service);
    } else if (b === 0x08) {
      // BS: Backspace
      service.currentWindow.backspace();
    } else if (b === 0x0c) {
      // FF: Form feed
      service.currentWindow.clearText();
    } else if (b === 0x0d) {
      // CR: Carriage return
      service.currentWindow.pendingNewLine = true;
    } else if (b === 0x0e) {
      // HCR: Horizontal carriage return
      service.currentWindow.clearText();
    } else if (b === 0x8d) {
      // DLY: Delay, nothing to do
      i++;
    } else ; // all other codes are unsupported here and are skipped
  }
};
12118 /**
12119 * Execute an extended command
12120 *
12121 * @param {Integer} i Current index in the 708 packet
12122 * @param {Service} service The service object to be affected
12123 * @return {Integer} New index after parsing
12124 */
12125
12126
// Execute an EXT1-prefixed command. Only extended printable characters are
// supported; anything else is skipped. Returns the new index after parsing.
Cea708Stream.prototype.extendedCommands = function (i, service) {
  var nextByte = this.current708Packet.data[++i];

  if (within708TextBlock(nextByte)) {
    i = this.handleText(i, service, {
      isExtended: true
    });
  }

  return i;
};
12139 /**
12140 * Get PTS value of a given byte index
12141 *
12142 * @param {Integer} byteIndex Index of the byte
12143 * @return {Integer} PTS
12144 */
12145
12146
// Look up the PTS recorded for a given byte index. Only one PTS value is
// stored per byte *pair*, so the index is halved.
Cea708Stream.prototype.getPts = function (byteIndex) {
  var pairIndex = Math.floor(byteIndex / 2);
  return this.current708Packet.ptsVals[pairIndex];
};
12151 /**
12152 * Initializes a service
12153 *
12154 * @param {Integer} serviceNum Service number
12155 * @return {Service} Initialized service object
12156 */
12157
12158
// Create and initialize the service for serviceNum, wiring its windows'
// row-overflow hook to flush displayed text. Returns the new service.
// Fix: the original declared `serviceName` twice (the second, empty `var
// serviceName;` was a dead hoisted re-declaration); the duplicate is removed.
Cea708Stream.prototype.initService = function (serviceNum, i) {
  var serviceName = 'SERVICE' + serviceNum;
  var self = this;
  var encoding;

  // Use an explicitly configured text encoding for this service, if any.
  if (serviceName in this.serviceEncodings) {
    encoding = this.serviceEncodings[serviceName];
  }

  this.services[serviceNum] = new Cea708Service(serviceNum, encoding, self);
  // Before a window row overflows, emit whatever is currently displayed.
  this.services[serviceNum].init(this.getPts(i), function (pts) {
    self.flushDisplayed(pts, self.services[serviceNum]);
  });
  return this.services[serviceNum];
};
12175 /**
12176 * Execute text writing to current window
12177 *
12178 * @param {Integer} i Current index in the 708 packet
12179 * @param {Service} service The service object to be affected
12180 * @return {Integer} New index after parsing
12181 */
12182
12183
// Write one character into the service's current window. Returns the new
// index after parsing (it advances by one extra byte for multi-byte chars
// when a TextDecoder consumes both bytes).
Cea708Stream.prototype.handleText = function (i, service, options) {
  var isExtended = options && options.isExtended;
  var isMultiByte = options && options.isMultiByte;
  var packetData = this.current708Packet.data;
  // Extended characters are looked up with a 0x1000 offset to distinguish
  // them from non-extended codes in the translation table.
  var extended = isExtended ? 0x1000 : 0x0000;
  var currentByte = packetData[i];
  var nextByte = packetData[i + 1];
  var win = service.currentWindow;
  var char;
  var charCodeArray;

  // Use the TextDecoder if one was created for this service
  if (service.textDecoder_ && !isExtended) {
    if (isMultiByte) {
      charCodeArray = [currentByte, nextByte];
      i++; // the decoder consumed the second byte of the pair as well
    } else {
      charCodeArray = [currentByte];
    }

    char = service.textDecoder_.decode(new Uint8Array(charCodeArray));
  } else {
    char = get708CharFromCode(extended | currentByte);
  }

  // A carriage return seen earlier is deferred until more text arrives so
  // trailing CRs don't create blank lines.
  if (win.pendingNewLine && !win.isEmpty()) {
    win.newLine(this.getPts(i));
  }

  win.pendingNewLine = false;
  win.addText(char);
  return i;
};
12216 /**
12217 * Handle decoding of multibyte character
12218 *
12219 * @param {Integer} i Current index in the 708 packet
12220 * @param {Service} service The service object to be affected
12221 * @return {Integer} New index after parsing
12222 */
12223
12224
// Handle a P16 (0x18) two-byte character. Returns the new index after
// parsing; when the bytes aren't printable, the index is left unchanged and
// the command byte alone is consumed by the caller's loop increment.
Cea708Stream.prototype.multiByteCharacter = function (i, service) {
  var packetData = this.current708Packet.data;
  var firstByte = packetData[i + 1];
  var secondByte = packetData[i + 2];

  // Only decode when both bytes are in a printable range. Note the
  // pre-increment: handleText receives the index of the first character
  // byte (it advances past the second one itself when decoding).
  if (within708TextBlock(firstByte) && within708TextBlock(secondByte)) {
    i = this.handleText(++i, service, {
      isMultiByte: true
    });
  }

  return i;
};
12238 /**
12239 * Parse and execute the CW# command.
12240 *
12241 * Set the current window.
12242 *
12243 * @param {Integer} i Current index in the 708 packet
12244 * @param {Service} service The service object to be affected
12245 * @return {Integer} New index after parsing
12246 */
12247
12248
// Execute a CW# command: select the current window. The window number lives
// in the low three bits of the command byte. Returns the index unchanged.
Cea708Stream.prototype.setCurrentWindow = function (i, service) {
  var commandByte = this.current708Packet.data[i];
  service.setCurrentWindow(commandByte & 0x07);
  return i;
};
12256 /**
12257 * Parse and execute the DF# command.
12258 *
12259 * Define a window and set it as the current window.
12260 *
12261 * @param {Integer} i Current index in the 708 packet
12262 * @param {Service} service The service object to be affected
12263 * @return {Integer} New index after parsing
12264 */
12265
12266
// Execute a DF# command: define a window (parsing its six parameter bytes)
// and make it the current window. Returns the new index after parsing.
Cea708Stream.prototype.defineWindow = function (i, service) {
  var packetData = this.current708Packet.data;
  var b = packetData[i];
  // The DF# command byte carries the window number in its low three bits.
  var windowNum = b & 0x07;
  service.setCurrentWindow(windowNum);
  var win = service.currentWindow;
  // Byte 1: visibility, row/column locks, priority.
  b = packetData[++i];
  win.visible = (b & 0x20) >> 5; // v
  win.rowLock = (b & 0x10) >> 4; // rl
  win.columnLock = (b & 0x08) >> 3; // cl
  win.priority = b & 0x07; // p
  // Byte 2: relative-positioning flag and vertical anchor.
  b = packetData[++i];
  win.relativePositioning = (b & 0x80) >> 7; // rp
  win.anchorVertical = b & 0x7f; // av
  // Byte 3: horizontal anchor.
  b = packetData[++i];
  win.anchorHorizontal = b; // ah
  // Byte 4: anchor point and row count.
  b = packetData[++i];
  win.anchorPoint = (b & 0xf0) >> 4; // ap
  win.rowCount = b & 0x0f; // rc
  // Byte 5: column count.
  b = packetData[++i];
  win.columnCount = b & 0x3f; // cc
  // Byte 6: predefined window and pen style indices.
  b = packetData[++i];
  win.windowStyle = (b & 0x38) >> 3; // ws
  win.penStyle = b & 0x07; // ps
  // The spec says there are (rowCount+1) "virtual rows"
  win.virtualRowCount = win.rowCount + 1;
  return i;
};
12307 /**
12308 * Parse and execute the SWA command.
12309 *
12310 * Set attributes of the current window.
12311 *
12312 * @param {Integer} i Current index in the 708 packet
12313 * @param {Service} service The service object to be affected
12314 * @return {Integer} New index after parsing
12315 */
12316
12317
// Execute an SWA command: parse the four attribute bytes into the current
// window's winAttr. Returns the new index after parsing. The parsed
// attributes are recorded but not otherwise acted upon.
Cea708Stream.prototype.setWindowAttributes = function (i, service) {
  var packetData = this.current708Packet.data;
  var b = packetData[i]; // NOTE(review): this initial read is unused; b is reassigned below
  var winAttr = service.currentWindow.winAttr;
  // Byte 1: fill opacity and fill color components.
  b = packetData[++i];
  winAttr.fillOpacity = (b & 0xc0) >> 6; // fo
  winAttr.fillRed = (b & 0x30) >> 4; // fr
  winAttr.fillGreen = (b & 0x0c) >> 2; // fg
  winAttr.fillBlue = b & 0x03; // fb
  // Byte 2: border type (low bits) and border color components.
  b = packetData[++i];
  winAttr.borderType = (b & 0xc0) >> 6; // bt
  winAttr.borderRed = (b & 0x30) >> 4; // br
  winAttr.borderGreen = (b & 0x0c) >> 2; // bg
  winAttr.borderBlue = b & 0x03; // bb
  // Byte 3: border type high bit, word wrap, print/scroll direction, justify.
  b = packetData[++i];
  winAttr.borderType += (b & 0x80) >> 5; // bt
  winAttr.wordWrap = (b & 0x40) >> 6; // ww
  winAttr.printDirection = (b & 0x30) >> 4; // pd
  winAttr.scrollDirection = (b & 0x0c) >> 2; // sd
  winAttr.justify = b & 0x03; // j
  // Byte 4: effect speed, effect direction, display effect.
  b = packetData[++i];
  winAttr.effectSpeed = (b & 0xf0) >> 4; // es
  winAttr.effectDirection = (b & 0x0c) >> 2; // ed
  winAttr.displayEffect = b & 0x03; // de
  return i;
};
12360 /**
12361 * Gather text from all displayed windows and push a caption to output.
12362 *
 * @param {Integer} pts PTS value at which the displayed text is flushed
12364 * @param {Service} service The service object to be affected
12365 */
12366
12367
// Gather text from all visible, non-empty windows and push one caption.
// TODO: Positioning is not supported, so with multiple visible windows the
// text may not be emitted in on-screen order; sample files so far have not
// shown any issue.
Cea708Stream.prototype.flushDisplayed = function (pts, service) {
  var displayedText = [];

  for (var winId = 0; winId < 8; winId++) {
    var win = service.windows[winId];

    if (win.visible && !win.isEmpty()) {
      displayedText.push(win.getText());
    }
  }

  service.endPts = pts;
  service.text = displayedText.join('\n\n');
  this.pushCaption(service);
  service.startPts = pts;
};
12383 /**
12384 * Push a caption to output if the caption contains text.
12385 *
12386 * @param {Service} service The service object to be affected
12387 */
12388
12389
// Emit a 'data' event for the service's buffered text, if any, then reset
// the buffer and roll startPts forward to endPts.
Cea708Stream.prototype.pushCaption = function (service) {
  if (service.text === '') {
    return;
  }

  this.trigger('data', {
    startPts: service.startPts,
    endPts: service.endPts,
    text: service.text,
    stream: 'cc708_' + service.serviceNum
  });
  service.text = '';
  service.startPts = service.endPts;
};
12402 /**
12403 * Parse and execute the DSW command.
12404 *
12405 * Set visible property of windows based on the parsed bitmask.
12406 *
12407 * @param {Integer} i Current index in the 708 packet
12408 * @param {Service} service The service object to be affected
12409 * @return {Integer} New index after parsing
12410 */
12411
12412
// Execute a DSW command: flush currently displayed text, then mark each
// window selected by the bitmask as visible. Returns the new index.
Cea708Stream.prototype.displayWindows = function (i, service) {
  var bitmask = this.current708Packet.data[++i];
  this.flushDisplayed(this.getPts(i), service);

  for (var winId = 0; winId < 8; winId++) {
    if (bitmask & (1 << winId)) {
      service.windows[winId].visible = 1;
    }
  }

  return i;
};
12427 /**
12428 * Parse and execute the HDW command.
12429 *
12430 * Set visible property of windows based on the parsed bitmask.
12431 *
12432 * @param {Integer} i Current index in the 708 packet
12433 * @param {Service} service The service object to be affected
12434 * @return {Integer} New index after parsing
12435 */
12436
12437
// Execute an HDW command: flush currently displayed text, then mark each
// window selected by the bitmask as hidden. Returns the new index.
Cea708Stream.prototype.hideWindows = function (i, service) {
  var bitmask = this.current708Packet.data[++i];
  this.flushDisplayed(this.getPts(i), service);

  for (var winId = 0; winId < 8; winId++) {
    if (bitmask & (1 << winId)) {
      service.windows[winId].visible = 0;
    }
  }

  return i;
};
12452 /**
12453 * Parse and execute the TGW command.
12454 *
12455 * Set visible property of windows based on the parsed bitmask.
12456 *
12457 * @param {Integer} i Current index in the 708 packet
12458 * @param {Service} service The service object to be affected
12459 * @return {Integer} New index after parsing
12460 */
12461
12462
// Execute a TGW command: flush currently displayed text, then flip the
// visibility of each window selected by the bitmask. Returns the new index.
Cea708Stream.prototype.toggleWindows = function (i, service) {
  var bitmask = this.current708Packet.data[++i];
  this.flushDisplayed(this.getPts(i), service);

  for (var winId = 0; winId < 8; winId++) {
    if (bitmask & (1 << winId)) {
      service.windows[winId].visible ^= 1;
    }
  }

  return i;
};
12477 /**
12478 * Parse and execute the CLW command.
12479 *
12480 * Clear text of windows based on the parsed bitmask.
12481 *
12482 * @param {Integer} i Current index in the 708 packet
12483 * @param {Service} service The service object to be affected
12484 * @return {Integer} New index after parsing
12485 */
12486
12487
// Execute a CLW command: flush currently displayed text, then clear the text
// of each window selected by the bitmask. Returns the new index.
Cea708Stream.prototype.clearWindows = function (i, service) {
  var bitmask = this.current708Packet.data[++i];
  this.flushDisplayed(this.getPts(i), service);

  for (var winId = 0; winId < 8; winId++) {
    if (bitmask & (1 << winId)) {
      service.windows[winId].clearText();
    }
  }

  return i;
};
12502 /**
12503 * Parse and execute the DLW command.
12504 *
12505 * Re-initialize windows based on the parsed bitmask.
12506 *
12507 * @param {Integer} i Current index in the 708 packet
12508 * @param {Service} service The service object to be affected
12509 * @return {Integer} New index after parsing
12510 */
12511
12512
// Execute a DLW command: flush currently displayed text, then re-initialize
// each window selected by the bitmask. Returns the new index.
Cea708Stream.prototype.deleteWindows = function (i, service) {
  var bitmask = this.current708Packet.data[++i];
  this.flushDisplayed(this.getPts(i), service);

  for (var winId = 0; winId < 8; winId++) {
    if (bitmask & (1 << winId)) {
      service.windows[winId].reset();
    }
  }

  return i;
};
12527 /**
12528 * Parse and execute the SPA command.
12529 *
12530 * Set pen attributes of the current window.
12531 *
12532 * @param {Integer} i Current index in the 708 packet
12533 * @param {Service} service The service object to be affected
12534 * @return {Integer} New index after parsing
12535 */
12536
12537
// Execute an SPA command: parse the two attribute bytes into the current
// window's penAttr. Returns the new index after parsing. The parsed
// attributes are recorded but not otherwise acted upon.
Cea708Stream.prototype.setPenAttributes = function (i, service) {
  var packetData = this.current708Packet.data;
  var b = packetData[i]; // NOTE(review): this initial read is unused; b is reassigned below
  var penAttr = service.currentWindow.penAttr;
  // Byte 1: text tag, offset, pen size.
  b = packetData[++i];
  penAttr.textTag = (b & 0xf0) >> 4; // tt
  penAttr.offset = (b & 0x0c) >> 2; // o
  penAttr.penSize = b & 0x03; // s
  // Byte 2: italics, underline, edge type, font style.
  b = packetData[++i];
  penAttr.italics = (b & 0x80) >> 7; // i
  penAttr.underline = (b & 0x40) >> 6; // u
  penAttr.edgeType = (b & 0x38) >> 3; // et
  penAttr.fontStyle = b & 0x07; // fs
  return i;
};
12560 /**
12561 * Parse and execute the SPC command.
12562 *
12563 * Set pen color of the current window.
12564 *
12565 * @param {Integer} i Current index in the 708 packet
12566 * @param {Service} service The service object to be affected
12567 * @return {Integer} New index after parsing
12568 */
12569
12570
// Execute an SPC command: parse the three color bytes into the current
// window's penColor. Returns the new index after parsing. The parsed colors
// are recorded but not otherwise acted upon.
Cea708Stream.prototype.setPenColor = function (i, service) {
  var packetData = this.current708Packet.data;
  var b = packetData[i]; // NOTE(review): this initial read is unused; b is reassigned below
  var penColor = service.currentWindow.penColor;
  // Byte 1: foreground opacity and color components.
  b = packetData[++i];
  penColor.fgOpacity = (b & 0xc0) >> 6; // fo
  penColor.fgRed = (b & 0x30) >> 4; // fr
  penColor.fgGreen = (b & 0x0c) >> 2; // fg
  penColor.fgBlue = b & 0x03; // fb
  // Byte 2: background opacity and color components.
  b = packetData[++i];
  penColor.bgOpacity = (b & 0xc0) >> 6; // bo
  penColor.bgRed = (b & 0x30) >> 4; // br
  penColor.bgGreen = (b & 0x0c) >> 2; // bg
  penColor.bgBlue = b & 0x03; // bb
  // Byte 3: edge color components.
  b = packetData[++i];
  penColor.edgeRed = (b & 0x30) >> 4; // er
  penColor.edgeGreen = (b & 0x0c) >> 2; // eg
  penColor.edgeBlue = b & 0x03; // eb
  return i;
};
12602 /**
12603 * Parse and execute the SPL command.
12604 *
12605 * Set pen location of the current window.
12606 *
12607 * @param {Integer} i Current index in the 708 packet
12608 * @param {Service} service The service object to be affected
12609 * @return {Integer} New index after parsing
12610 */
12611
12612
12613 Cea708Stream.prototype.setPenLocation = function (i, service) {
12614 var packetData = this.current708Packet.data;
12615 var b = packetData[i];
12616 var penLoc = service.currentWindow.penLoc; // Positioning isn't really supported at the moment, so this essentially just inserts a linebreak
12617
12618 service.currentWindow.pendingNewLine = true;
12619 b = packetData[++i];
12620 penLoc.row = b & 0x0f; // r
12621
12622 b = packetData[++i];
12623 penLoc.column = b & 0x3f; // c
12624
12625 return i;
12626 };
12627 /**
12628 * Execute the RST command.
12629 *
12630 * Reset service to a clean slate. Re-initialize.
12631 *
12632 * @param {Integer} i Current index in the 708 packet
12633 * @param {Service} service The service object to be affected
12634 * @return {Service} Re-initialized service
12635 */
12636
12637
12638 Cea708Stream.prototype.reset = function (i, service) {
12639 var pts = this.getPts(i);
12640 this.flushDisplayed(pts, service);
12641 return this.initService(service.serviceNum, i);
12642 }; // This hash maps non-ASCII, special, and extended character codes to their
12643 // proper Unicode equivalent. The first keys that are only a single byte
12644 // are the non-standard ASCII characters, which simply map the CEA608 byte
12645 // to the standard ASCII/Unicode. The two-byte keys that follow are the CEA608
12646 // character codes, but have their MSB bitmasked with 0x03 so that a lookup
12647 // can be performed regardless of the field and data channel on which the
12648 // character code was received.
12649
12650
12651 var CHARACTER_TRANSLATION = {
12652 0x2a: 0xe1,
12653 // á
12654 0x5c: 0xe9,
12655 // é
12656 0x5e: 0xed,
12657 // í
12658 0x5f: 0xf3,
12659 // ó
12660 0x60: 0xfa,
12661 // ú
12662 0x7b: 0xe7,
12663 // ç
12664 0x7c: 0xf7,
12665 // ÷
12666 0x7d: 0xd1,
12667 // Ñ
12668 0x7e: 0xf1,
12669 // ñ
12670 0x7f: 0x2588,
12671 // █
12672 0x0130: 0xae,
12673 // ®
12674 0x0131: 0xb0,
12675 // °
12676 0x0132: 0xbd,
12677 // ½
12678 0x0133: 0xbf,
12679 // ¿
12680 0x0134: 0x2122,
12681 // ™
12682 0x0135: 0xa2,
12683 // ¢
12684 0x0136: 0xa3,
12685 // £
12686 0x0137: 0x266a,
12687 // ♪
12688 0x0138: 0xe0,
12689 // à
12690 0x0139: 0xa0,
12691 //
12692 0x013a: 0xe8,
12693 // è
12694 0x013b: 0xe2,
12695 // â
12696 0x013c: 0xea,
12697 // ê
12698 0x013d: 0xee,
12699 // î
12700 0x013e: 0xf4,
12701 // ô
12702 0x013f: 0xfb,
12703 // û
12704 0x0220: 0xc1,
12705 // Á
12706 0x0221: 0xc9,
12707 // É
12708 0x0222: 0xd3,
12709 // Ó
12710 0x0223: 0xda,
12711 // Ú
12712 0x0224: 0xdc,
12713 // Ü
12714 0x0225: 0xfc,
12715 // ü
12716 0x0226: 0x2018,
12717 // ‘
12718 0x0227: 0xa1,
12719 // ¡
12720 0x0228: 0x2a,
12721 // *
12722 0x0229: 0x27,
12723 // '
12724 0x022a: 0x2014,
12725 // —
12726 0x022b: 0xa9,
12727 // ©
12728 0x022c: 0x2120,
12729 // ℠
12730 0x022d: 0x2022,
12731 // •
12732 0x022e: 0x201c,
12733 // “
12734 0x022f: 0x201d,
12735 // ”
12736 0x0230: 0xc0,
12737 // À
12738 0x0231: 0xc2,
12739 // Â
12740 0x0232: 0xc7,
12741 // Ç
12742 0x0233: 0xc8,
12743 // È
12744 0x0234: 0xca,
12745 // Ê
12746 0x0235: 0xcb,
12747 // Ë
12748 0x0236: 0xeb,
12749 // ë
12750 0x0237: 0xce,
12751 // Î
12752 0x0238: 0xcf,
12753 // Ï
12754 0x0239: 0xef,
12755 // ï
12756 0x023a: 0xd4,
12757 // Ô
12758 0x023b: 0xd9,
12759 // Ù
12760 0x023c: 0xf9,
12761 // ù
12762 0x023d: 0xdb,
12763 // Û
12764 0x023e: 0xab,
12765 // «
12766 0x023f: 0xbb,
12767 // »
12768 0x0320: 0xc3,
12769 // Ã
12770 0x0321: 0xe3,
12771 // ã
12772 0x0322: 0xcd,
12773 // Í
12774 0x0323: 0xcc,
12775 // Ì
12776 0x0324: 0xec,
12777 // ì
12778 0x0325: 0xd2,
12779 // Ò
12780 0x0326: 0xf2,
12781 // ò
12782 0x0327: 0xd5,
12783 // Õ
12784 0x0328: 0xf5,
12785 // õ
12786 0x0329: 0x7b,
12787 // {
12788 0x032a: 0x7d,
12789 // }
12790 0x032b: 0x5c,
12791 // \
12792 0x032c: 0x5e,
12793 // ^
12794 0x032d: 0x5f,
12795 // _
12796 0x032e: 0x7c,
12797 // |
12798 0x032f: 0x7e,
12799 // ~
12800 0x0330: 0xc4,
12801 // Ä
12802 0x0331: 0xe4,
12803 // ä
12804 0x0332: 0xd6,
12805 // Ö
12806 0x0333: 0xf6,
12807 // ö
12808 0x0334: 0xdf,
12809 // ß
12810 0x0335: 0xa5,
12811 // ¥
12812 0x0336: 0xa4,
12813 // ¤
12814 0x0337: 0x2502,
12815 // │
12816 0x0338: 0xc5,
12817 // Å
12818 0x0339: 0xe5,
12819 // å
12820 0x033a: 0xd8,
12821 // Ø
12822 0x033b: 0xf8,
12823 // ø
12824 0x033c: 0x250c,
12825 // ┌
12826 0x033d: 0x2510,
12827 // ┐
12828 0x033e: 0x2514,
12829 // └
12830 0x033f: 0x2518 // ┘
12831
12832 };
12833
12834 var getCharFromCode = function getCharFromCode(code) {
12835 if (code === null) {
12836 return '';
12837 }
12838
12839 code = CHARACTER_TRANSLATION[code] || code;
12840 return String.fromCharCode(code);
12841 }; // the index of the last row in a CEA-608 display buffer
12842
12843
12844 var BOTTOM_ROW = 14; // This array is used for mapping PACs -> row #, since there's no way of
12845 // getting it through bit logic.
12846
12847 var ROWS = [0x1100, 0x1120, 0x1200, 0x1220, 0x1500, 0x1520, 0x1600, 0x1620, 0x1700, 0x1720, 0x1000, 0x1300, 0x1320, 0x1400, 0x1420]; // CEA-608 captions are rendered onto a 34x15 matrix of character
12848 // cells. The "bottom" row is the last element in the outer array.
12849
12850 var createDisplayBuffer = function createDisplayBuffer() {
12851 var result = [],
12852 i = BOTTOM_ROW + 1;
12853
12854 while (i--) {
12855 result.push('');
12856 }
12857
12858 return result;
12859 };
12860
  /**
   * A stream that decodes two-byte CEA-608 caption words for a single
   * field/data-channel pair into caption text, tracking the three display
   * modes (pop-on, roll-up, paint-on) as it goes.
   *
   * @param {Integer} field The 608 field this stream decodes (0 or 1)
   * @param {Integer} dataChannel The data channel within the field (0 or 1)
   */
  var Cea608Stream = function Cea608Stream(field, dataChannel) {
    Cea608Stream.prototype.init.call(this);
    this.field_ = field || 0;
    this.dataChannel_ = dataChannel || 0;
    // CC1..CC4 naming derived from field + data channel
    this.name_ = 'CC' + ((this.field_ << 1 | this.dataChannel_) + 1);
    this.setConstants();
    this.reset();

    // Decode one two-byte caption word. `packet` carries `ccData` (the
    // two caption bytes) and `pts`.
    this.push = function (packet) {
      var data, swap, char0, char1, text; // remove the parity bits

      data = packet.ccData & 0x7f7f; // ignore duplicate control codes; the spec demands they're sent twice

      if (data === this.lastControlCode_) {
        this.lastControlCode_ = null;
        return;
      } // Store control codes


      if ((data & 0xf000) === 0x1000) {
        this.lastControlCode_ = data;
      } else if (data !== this.PADDING_) {
        this.lastControlCode_ = null;
      }

      char0 = data >>> 8;
      char1 = data & 0xff;

      if (data === this.PADDING_) {
        return;
      } else if (data === this.RESUME_CAPTION_LOADING_) {
        this.mode_ = 'popOn';
      } else if (data === this.END_OF_CAPTION_) {
        // If an EOC is received while in paint-on mode, the displayed caption
        // text should be swapped to non-displayed memory as if it was a pop-on
        // caption. Because of that, we should explicitly switch back to pop-on
        // mode
        this.mode_ = 'popOn';
        this.clearFormatting(packet.pts); // if a caption was being displayed, it's gone now

        this.flushDisplayed(packet.pts); // flip memory

        swap = this.displayed_;
        this.displayed_ = this.nonDisplayed_;
        this.nonDisplayed_ = swap; // start measuring the time to display the caption

        this.startPts_ = packet.pts;
      } else if (data === this.ROLL_UP_2_ROWS_) {
        this.rollUpRows_ = 2;
        this.setRollUp(packet.pts);
      } else if (data === this.ROLL_UP_3_ROWS_) {
        this.rollUpRows_ = 3;
        this.setRollUp(packet.pts);
      } else if (data === this.ROLL_UP_4_ROWS_) {
        this.rollUpRows_ = 4;
        this.setRollUp(packet.pts);
      } else if (data === this.CARRIAGE_RETURN_) {
        this.clearFormatting(packet.pts);
        this.flushDisplayed(packet.pts);
        this.shiftRowsUp_();
        this.startPts_ = packet.pts;
      } else if (data === this.BACKSPACE_) {
        // backspace deletes from the buffer the current mode writes into
        if (this.mode_ === 'popOn') {
          this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
        } else {
          this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
        }
      } else if (data === this.ERASE_DISPLAYED_MEMORY_) {
        this.flushDisplayed(packet.pts);
        this.displayed_ = createDisplayBuffer();
      } else if (data === this.ERASE_NON_DISPLAYED_MEMORY_) {
        this.nonDisplayed_ = createDisplayBuffer();
      } else if (data === this.RESUME_DIRECT_CAPTIONING_) {
        if (this.mode_ !== 'paintOn') {
          // NOTE: This should be removed when proper caption positioning is
          // implemented
          this.flushDisplayed(packet.pts);
          this.displayed_ = createDisplayBuffer();
        }

        this.mode_ = 'paintOn';
        this.startPts_ = packet.pts; // Append special characters to caption text
      } else if (this.isSpecialCharacter(char0, char1)) {
        // Bitmask char0 so that we can apply character transformations
        // regardless of field and data channel.
        // Then byte-shift to the left and OR with char1 so we can pass the
        // entire character code to `getCharFromCode`.
        char0 = (char0 & 0x03) << 8;
        text = getCharFromCode(char0 | char1);
        this[this.mode_](packet.pts, text);
        this.column_++; // Append extended characters to caption text
      } else if (this.isExtCharacter(char0, char1)) {
        // Extended characters always follow their "non-extended" equivalents.
        // IE if a "è" is desired, you'll always receive "eè"; non-compliant
        // decoders are supposed to drop the "è", while compliant decoders
        // backspace the "e" and insert "è".
        // Delete the previous character
        if (this.mode_ === 'popOn') {
          this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
        } else {
          this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
        } // Bitmask char0 so that we can apply character transformations
        // regardless of field and data channel.
        // Then byte-shift to the left and OR with char1 so we can pass the
        // entire character code to `getCharFromCode`.


        char0 = (char0 & 0x03) << 8;
        text = getCharFromCode(char0 | char1);
        this[this.mode_](packet.pts, text);
        this.column_++; // Process mid-row codes
      } else if (this.isMidRowCode(char0, char1)) {
        // Attributes are not additive, so clear all formatting
        this.clearFormatting(packet.pts); // According to the standard, mid-row codes
        // should be replaced with spaces, so add one now

        this[this.mode_](packet.pts, ' ');
        this.column_++;

        if ((char1 & 0xe) === 0xe) {
          this.addFormatting(packet.pts, ['i']);
        }

        if ((char1 & 0x1) === 0x1) {
          this.addFormatting(packet.pts, ['u']);
        } // Detect offset control codes and adjust cursor

      } else if (this.isOffsetControlCode(char0, char1)) {
        // Cursor position is set by indent PAC (see below) in 4-column
        // increments, with an additional offset code of 1-3 to reach any
        // of the 32 columns specified by CEA-608. So all we need to do
        // here is increment the column cursor by the given offset.
        this.column_ += char1 & 0x03; // Detect PACs (Preamble Address Codes)
      } else if (this.isPAC(char0, char1)) {
        // There's no logic for PAC -> row mapping, so we have to just
        // find the row code in an array and use its index :(
        var row = ROWS.indexOf(data & 0x1f20); // Configure the caption window if we're in roll-up mode

        if (this.mode_ === 'rollUp') {
          // This implies that the base row is incorrectly set.
          // As per the recommendation in CEA-608(Base Row Implementation), defer to the number
          // of roll-up rows set.
          if (row - this.rollUpRows_ + 1 < 0) {
            row = this.rollUpRows_ - 1;
          }

          this.setRollUp(packet.pts, row);
        }

        if (row !== this.row_) {
          // formatting is only persistent for current row
          this.clearFormatting(packet.pts);
          this.row_ = row;
        } // All PACs can apply underline, so detect and apply
        // (All odd-numbered second bytes set underline)


        if (char1 & 0x1 && this.formatting_.indexOf('u') === -1) {
          this.addFormatting(packet.pts, ['u']);
        }

        if ((data & 0x10) === 0x10) {
          // We've got an indent level code. Each successive even number
          // increments the column cursor by 4, so we can get the desired
          // column position by bit-shifting to the right (to get n/2)
          // and multiplying by 4.
          this.column_ = ((data & 0xe) >> 1) * 4;
        }

        if (this.isColorPAC(char1)) {
          // it's a color code, though we only support white, which
          // can be either normal or italicized. white italics can be
          // either 0x4e or 0x6e depending on the row, so we just
          // bitwise-and with 0xe to see if italics should be turned on
          if ((char1 & 0xe) === 0xe) {
            this.addFormatting(packet.pts, ['i']);
          }
        } // We have a normal character in char0, and possibly one in char1

      } else if (this.isNormalChar(char0)) {
        if (char1 === 0x00) {
          char1 = null;
        }

        text = getCharFromCode(char0);
        text += getCharFromCode(char1);
        this[this.mode_](packet.pts, text);
        this.column_ += text.length;
      } // finish data processing

    };
  };
13053
13054 Cea608Stream.prototype = new stream(); // Trigger a cue point that captures the current state of the
13055 // display buffer
13056
13057 Cea608Stream.prototype.flushDisplayed = function (pts) {
13058 var content = this.displayed_ // remove spaces from the start and end of the string
13059 .map(function (row, index) {
13060 try {
13061 return row.trim();
13062 } catch (e) {
13063 // Ordinarily, this shouldn't happen. However, caption
13064 // parsing errors should not throw exceptions and
13065 // break playback.
13066 this.trigger('log', {
13067 level: 'warn',
13068 message: 'Skipping a malformed 608 caption at index ' + index + '.'
13069 });
13070 return '';
13071 }
13072 }, this) // combine all text rows to display in one cue
13073 .join('\n') // and remove blank rows from the start and end, but not the middle
13074 .replace(/^\n+|\n+$/g, '');
13075
13076 if (content.length) {
13077 this.trigger('data', {
13078 startPts: this.startPts_,
13079 endPts: pts,
13080 text: content,
13081 stream: this.name_
13082 });
13083 }
13084 };
13085 /**
13086 * Zero out the data, used for startup and on seek
13087 */
13088
13089
  Cea608Stream.prototype.reset = function () {
    this.mode_ = 'popOn'; // When in roll-up mode, the index of the last row that will
    // actually display captions. If a caption is shifted to a row
    // with a lower index than this, it is cleared from the display
    // buffer

    this.topRow_ = 0;
    this.startPts_ = 0; // displayed_ is the on-screen buffer; nonDisplayed_ is the back buffer
    // that pop-on captions are composed into before the EOC flip

    this.displayed_ = createDisplayBuffer();
    this.nonDisplayed_ = createDisplayBuffer();
    this.lastControlCode_ = null; // Track row and column for proper line-breaking and spacing

    this.column_ = 0;
    this.row_ = BOTTOM_ROW;
    this.rollUpRows_ = 2; // This variable holds currently-applied formatting

    this.formatting_ = [];
  };
13108 /**
13109 * Sets up control code and related constants for this instance
13110 */
13111
13112
  Cea608Stream.prototype.setConstants = function () {
    // The following attributes have these uses:
    // ext_ : char0 for mid-row codes, and the base for extended
    // chars (ext_+0, ext_+1, and ext_+2 are char0s for
    // extended codes)
    // control_: char0 for control codes, except byte-shifted to the
    // left so that we can do this.control_ | CONTROL_CODE
    // offset_: char0 for tab offset codes
    //
    // It's also worth noting that control codes, and _only_ control codes,
    // differ between field 1 and field2. Field 2 control codes are always
    // their field 1 value plus 1. That's why there's the "| field" on the
    // control value.
    if (this.dataChannel_ === 0) {
      this.BASE_ = 0x10;
      this.EXT_ = 0x11;
      this.CONTROL_ = (0x14 | this.field_) << 8;
      this.OFFSET_ = 0x17;
    } else if (this.dataChannel_ === 1) {
      this.BASE_ = 0x18;
      this.EXT_ = 0x19;
      this.CONTROL_ = (0x1c | this.field_) << 8;
      this.OFFSET_ = 0x1f;
    } // NOTE(review): no constants are assigned when dataChannel_ is outside
    // {0, 1} -- presumably guaranteed by the constructor's `dataChannel || 0`
    // default; confirm before relying on other values.
    // Constants for the LSByte command codes recognized by Cea608Stream. This
    // list is not exhaustive. For a more comprehensive listing and semantics see
    // http://www.gpo.gov/fdsys/pkg/CFR-2010-title47-vol1/pdf/CFR-2010-title47-vol1-sec15-119.pdf
    // Padding


    this.PADDING_ = 0x0000; // Pop-on Mode

    this.RESUME_CAPTION_LOADING_ = this.CONTROL_ | 0x20;
    this.END_OF_CAPTION_ = this.CONTROL_ | 0x2f; // Roll-up Mode

    this.ROLL_UP_2_ROWS_ = this.CONTROL_ | 0x25;
    this.ROLL_UP_3_ROWS_ = this.CONTROL_ | 0x26;
    this.ROLL_UP_4_ROWS_ = this.CONTROL_ | 0x27;
    this.CARRIAGE_RETURN_ = this.CONTROL_ | 0x2d; // paint-on mode

    this.RESUME_DIRECT_CAPTIONING_ = this.CONTROL_ | 0x29; // Erasure

    this.BACKSPACE_ = this.CONTROL_ | 0x21;
    this.ERASE_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2c;
    this.ERASE_NON_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2e;
  };
13158 /**
13159 * Detects if the 2-byte packet data is a special character
13160 *
13161 * Special characters have a second byte in the range 0x30 to 0x3f,
13162 * with the first byte being 0x11 (for data channel 1) or 0x19 (for
13163 * data channel 2).
13164 *
13165 * @param {Integer} char0 The first byte
13166 * @param {Integer} char1 The second byte
13167 * @return {Boolean} Whether the 2 bytes are an special character
13168 */
13169
13170
13171 Cea608Stream.prototype.isSpecialCharacter = function (char0, char1) {
13172 return char0 === this.EXT_ && char1 >= 0x30 && char1 <= 0x3f;
13173 };
13174 /**
13175 * Detects if the 2-byte packet data is an extended character
13176 *
13177 * Extended characters have a second byte in the range 0x20 to 0x3f,
13178 * with the first byte being 0x12 or 0x13 (for data channel 1) or
13179 * 0x1a or 0x1b (for data channel 2).
13180 *
13181 * @param {Integer} char0 The first byte
13182 * @param {Integer} char1 The second byte
13183 * @return {Boolean} Whether the 2 bytes are an extended character
13184 */
13185
13186
13187 Cea608Stream.prototype.isExtCharacter = function (char0, char1) {
13188 return (char0 === this.EXT_ + 1 || char0 === this.EXT_ + 2) && char1 >= 0x20 && char1 <= 0x3f;
13189 };
13190 /**
13191 * Detects if the 2-byte packet is a mid-row code
13192 *
13193 * Mid-row codes have a second byte in the range 0x20 to 0x2f, with
13194 * the first byte being 0x11 (for data channel 1) or 0x19 (for data
13195 * channel 2).
13196 *
13197 * @param {Integer} char0 The first byte
13198 * @param {Integer} char1 The second byte
13199 * @return {Boolean} Whether the 2 bytes are a mid-row code
13200 */
13201
13202
13203 Cea608Stream.prototype.isMidRowCode = function (char0, char1) {
13204 return char0 === this.EXT_ && char1 >= 0x20 && char1 <= 0x2f;
13205 };
13206 /**
13207 * Detects if the 2-byte packet is an offset control code
13208 *
13209 * Offset control codes have a second byte in the range 0x21 to 0x23,
13210 * with the first byte being 0x17 (for data channel 1) or 0x1f (for
13211 * data channel 2).
13212 *
13213 * @param {Integer} char0 The first byte
13214 * @param {Integer} char1 The second byte
13215 * @return {Boolean} Whether the 2 bytes are an offset control code
13216 */
13217
13218
13219 Cea608Stream.prototype.isOffsetControlCode = function (char0, char1) {
13220 return char0 === this.OFFSET_ && char1 >= 0x21 && char1 <= 0x23;
13221 };
13222 /**
13223 * Detects if the 2-byte packet is a Preamble Address Code
13224 *
13225 * PACs have a first byte in the range 0x10 to 0x17 (for data channel 1)
13226 * or 0x18 to 0x1f (for data channel 2), with the second byte in the
13227 * range 0x40 to 0x7f.
13228 *
13229 * @param {Integer} char0 The first byte
13230 * @param {Integer} char1 The second byte
13231 * @return {Boolean} Whether the 2 bytes are a PAC
13232 */
13233
13234
13235 Cea608Stream.prototype.isPAC = function (char0, char1) {
13236 return char0 >= this.BASE_ && char0 < this.BASE_ + 8 && char1 >= 0x40 && char1 <= 0x7f;
13237 };
13238 /**
13239 * Detects if a packet's second byte is in the range of a PAC color code
13240 *
13241 * PAC color codes have the second byte be in the range 0x40 to 0x4f, or
   * 0x60 to 0x7f (the implementation accepts through 0x7f to include the
   * italicized white variants).
13243 *
13244 * @param {Integer} char1 The second byte
13245 * @return {Boolean} Whether the byte is a color PAC
13246 */
13247
13248
13249 Cea608Stream.prototype.isColorPAC = function (char1) {
13250 return char1 >= 0x40 && char1 <= 0x4f || char1 >= 0x60 && char1 <= 0x7f;
13251 };
13252 /**
13253 * Detects if a single byte is in the range of a normal character
13254 *
13255 * Normal text bytes are in the range 0x20 to 0x7f.
13256 *
13257 * @param {Integer} char The byte
13258 * @return {Boolean} Whether the byte is a normal character
13259 */
13260
13261
13262 Cea608Stream.prototype.isNormalChar = function (char) {
13263 return char >= 0x20 && char <= 0x7f;
13264 };
13265 /**
13266 * Configures roll-up
13267 *
13268 * @param {Integer} pts Current PTS
13269 * @param {Integer} newBaseRow Used by PACs to slide the current window to
13270 * a new position
13271 */
13272
13273
  Cea608Stream.prototype.setRollUp = function (pts, newBaseRow) {
    // Reset the base row to the bottom row when switching modes
    if (this.mode_ !== 'rollUp') {
      this.row_ = BOTTOM_ROW;
      this.mode_ = 'rollUp'; // Spec says to wipe memories when switching to roll-up

      this.flushDisplayed(pts);
      this.nonDisplayed_ = createDisplayBuffer();
      this.displayed_ = createDisplayBuffer();
    }

    if (newBaseRow !== undefined && newBaseRow !== this.row_) {
      // move currently displayed captions (up or down) to the new base row
      for (var i = 0; i < this.rollUpRows_; i++) {
        this.displayed_[newBaseRow - i] = this.displayed_[this.row_ - i];
        this.displayed_[this.row_ - i] = '';
      }
    }

    // keep the current base row when the PAC didn't supply one; this must
    // happen after the row move above, which needs the old row_ value
    if (newBaseRow === undefined) {
      newBaseRow = this.row_;
    }

    // topRow_ is the highest row the roll-up window may occupy
    this.topRow_ = newBaseRow - this.rollUpRows_ + 1;
  }; // Adds the opening HTML tag for the passed character to the caption text,
  // and keeps track of it for later closing
13299 // and keeps track of it for later closing
13300
13301
13302 Cea608Stream.prototype.addFormatting = function (pts, format) {
13303 this.formatting_ = this.formatting_.concat(format);
13304 var text = format.reduce(function (text, format) {
13305 return text + '<' + format + '>';
13306 }, '');
13307 this[this.mode_](pts, text);
13308 }; // Adds HTML closing tags for current formatting to caption text and
13309 // clears remembered formatting
13310
13311
13312 Cea608Stream.prototype.clearFormatting = function (pts) {
13313 if (!this.formatting_.length) {
13314 return;
13315 }
13316
13317 var text = this.formatting_.reverse().reduce(function (text, format) {
13318 return text + '</' + format + '>';
13319 }, '');
13320 this.formatting_ = [];
13321 this[this.mode_](pts, text);
13322 }; // Mode Implementations
13323
13324
13325 Cea608Stream.prototype.popOn = function (pts, text) {
13326 var baseRow = this.nonDisplayed_[this.row_]; // buffer characters
13327
13328 baseRow += text;
13329 this.nonDisplayed_[this.row_] = baseRow;
13330 };
13331
13332 Cea608Stream.prototype.rollUp = function (pts, text) {
13333 var baseRow = this.displayed_[this.row_];
13334 baseRow += text;
13335 this.displayed_[this.row_] = baseRow;
13336 };
13337
13338 Cea608Stream.prototype.shiftRowsUp_ = function () {
13339 var i; // clear out inactive rows
13340
13341 for (i = 0; i < this.topRow_; i++) {
13342 this.displayed_[i] = '';
13343 }
13344
13345 for (i = this.row_ + 1; i < BOTTOM_ROW + 1; i++) {
13346 this.displayed_[i] = '';
13347 } // shift displayed rows up
13348
13349
13350 for (i = this.topRow_; i < this.row_; i++) {
13351 this.displayed_[i] = this.displayed_[i + 1];
13352 } // clear out the bottom row
13353
13354
13355 this.displayed_[this.row_] = '';
13356 };
13357
13358 Cea608Stream.prototype.paintOn = function (pts, text) {
13359 var baseRow = this.displayed_[this.row_];
13360 baseRow += text;
13361 this.displayed_[this.row_] = baseRow;
13362 }; // exports
13363
13364
  // Public surface of the caption module: the packet-level CaptionStream
  // plus the per-standard 608 and 708 decoders.
  var captionStream = {
    CaptionStream: CaptionStream$1,
    Cea608Stream: Cea608Stream,
    Cea708Stream: Cea708Stream
  };
13370 /**
13371 * mux.js
13372 *
13373 * Copyright (c) Brightcove
13374 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
13375 */
13376
13377 var streamTypes = {
13378 H264_STREAM_TYPE: 0x1B,
13379 ADTS_STREAM_TYPE: 0x0F,
13380 METADATA_STREAM_TYPE: 0x15
13381 };
13382 var MAX_TS = 8589934592;
13383 var RO_THRESH = 4294967296;
13384 var TYPE_SHARED = 'shared';
13385
13386 var handleRollover$1 = function handleRollover(value, reference) {
13387 var direction = 1;
13388
13389 if (value > reference) {
13390 // If the current timestamp value is greater than our reference timestamp and we detect a
13391 // timestamp rollover, this means the roll over is happening in the opposite direction.
13392 // Example scenario: Enter a long stream/video just after a rollover occurred. The reference
13393 // point will be set to a small number, e.g. 1. The user then seeks backwards over the
13394 // rollover point. In loading this segment, the timestamp values will be very large,
13395 // e.g. 2^33 - 1. Since this comes before the data we loaded previously, we want to adjust
13396 // the time stamp to be `value - 2^33`.
13397 direction = -1;
13398 } // Note: A seek forwards or back that is greater than the RO_THRESH (2^32, ~13 hours) will
13399 // cause an incorrect adjustment.
13400
13401
13402 while (Math.abs(reference - value) > RO_THRESH) {
13403 value += direction * MAX_TS;
13404 }
13405
13406 return value;
13407 };
13408
  /**
   * A stream that rewrites the 33-bit PTS/DTS values of incoming packets
   * into a continuous timeline by unwrapping rollovers against the first
   * DTS it sees (or the last DTS before a flush).
   *
   * @param {String} type Packet type to accept; omit for "shared" streams
   *                      that accept everything
   */
  var TimestampRolloverStream$1 = function TimestampRolloverStream(type) {
    var lastDTS, referenceDTS;
    TimestampRolloverStream.prototype.init.call(this); // The "shared" type is used in cases where a stream will contain muxed
    // video and audio. We could use `undefined` here, but having a string
    // makes debugging a little clearer.

    this.type_ = type || TYPE_SHARED;

    this.push = function (data) {
      // Any "shared" rollover streams will accept _all_ data. Otherwise,
      // streams will only accept data that matches their type.
      if (this.type_ !== TYPE_SHARED && data.type !== this.type_) {
        return;
      }

      // the first packet establishes the unwrap reference
      if (referenceDTS === undefined) {
        referenceDTS = data.dts;
      }

      data.dts = handleRollover$1(data.dts, referenceDTS);
      data.pts = handleRollover$1(data.pts, referenceDTS);
      lastDTS = data.dts;
      this.trigger('data', data);
    };

    this.flush = function () {
      // carry the timeline forward into the next segment
      referenceDTS = lastDTS;
      this.trigger('done');
    };

    this.endTimeline = function () {
      this.flush();
      this.trigger('endedtimeline');
    };

    this.discontinuity = function () {
      // forget the reference so the next packet starts a fresh timeline
      referenceDTS = void 0;
      lastDTS = void 0;
    };

    this.reset = function () {
      this.discontinuity();
      this.trigger('reset');
    };
  };
13454
  TimestampRolloverStream$1.prototype = new stream();
  // Module exports: the stream class plus the bare rollover helper.
  var timestampRolloverStream = {
    TimestampRolloverStream: TimestampRolloverStream$1,
    handleRollover: handleRollover$1
  };
13460
13461 var percentEncode$1 = function percentEncode(bytes, start, end) {
13462 var i,
13463 result = '';
13464
13465 for (i = start; i < end; i++) {
13466 result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
13467 }
13468
13469 return result;
13470 },
13471 // return the string representation of the specified byte range,
13472 // interpreted as UTf-8.
13473 parseUtf8 = function parseUtf8(bytes, start, end) {
13474 return decodeURIComponent(percentEncode$1(bytes, start, end));
13475 },
13476 // return the string representation of the specified byte range,
13477 // interpreted as ISO-8859-1.
13478 parseIso88591$1 = function parseIso88591(bytes, start, end) {
13479 return unescape(percentEncode$1(bytes, start, end)); // jshint ignore:line
13480 },
13481 parseSyncSafeInteger$1 = function parseSyncSafeInteger(data) {
13482 return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
13483 },
13484 tagParsers = {
13485 TXXX: function TXXX(tag) {
13486 var i;
13487
13488 if (tag.data[0] !== 3) {
13489 // ignore frames with unrecognized character encodings
13490 return;
13491 }
13492
13493 for (i = 1; i < tag.data.length; i++) {
13494 if (tag.data[i] === 0) {
13495 // parse the text fields
13496 tag.description = parseUtf8(tag.data, 1, i); // do not include the null terminator in the tag value
13497
13498 tag.value = parseUtf8(tag.data, i + 1, tag.data.length).replace(/\0*$/, '');
13499 break;
13500 }
13501 }
13502
13503 tag.data = tag.value;
13504 },
13505 WXXX: function WXXX(tag) {
13506 var i;
13507
13508 if (tag.data[0] !== 3) {
13509 // ignore frames with unrecognized character encodings
13510 return;
13511 }
13512
13513 for (i = 1; i < tag.data.length; i++) {
13514 if (tag.data[i] === 0) {
13515 // parse the description and URL fields
13516 tag.description = parseUtf8(tag.data, 1, i);
13517 tag.url = parseUtf8(tag.data, i + 1, tag.data.length);
13518 break;
13519 }
13520 }
13521 },
13522 PRIV: function PRIV(tag) {
13523 var i;
13524
13525 for (i = 0; i < tag.data.length; i++) {
13526 if (tag.data[i] === 0) {
13527 // parse the description and URL fields
13528 tag.owner = parseIso88591$1(tag.data, 0, i);
13529 break;
13530 }
13531 }
13532
13533 tag.privateData = tag.data.subarray(i + 1);
13534 tag.data = tag.privateData;
13535 }
13536 },
13537 _MetadataStream;
13538
13539 _MetadataStream = function MetadataStream(options) {
13540 var settings = {
13541 // the bytes of the program-level descriptor field in MP2T
13542 // see ISO/IEC 13818-1:2013 (E), section 2.6 "Program and
13543 // program element descriptors"
13544 descriptor: options && options.descriptor
13545 },
13546 // the total size in bytes of the ID3 tag being parsed
13547 tagSize = 0,
13548 // tag data that is not complete enough to be parsed
13549 buffer = [],
13550 // the total number of bytes currently in the buffer
13551 bufferSize = 0,
13552 i;
13553
13554 _MetadataStream.prototype.init.call(this); // calculate the text track in-band metadata track dispatch type
13555 // https://html.spec.whatwg.org/multipage/embedded-content.html#steps-to-expose-a-media-resource-specific-text-track
13556
13557
13558 this.dispatchType = streamTypes.METADATA_STREAM_TYPE.toString(16);
13559
13560 if (settings.descriptor) {
13561 for (i = 0; i < settings.descriptor.length; i++) {
13562 this.dispatchType += ('00' + settings.descriptor[i].toString(16)).slice(-2);
13563 }
13564 }
13565
    /**
     * Accept a 'timed-metadata' PES packet fragment, buffer it until a
     * complete ID3 tag has arrived, then parse the tag's frames and emit the
     * tag as a 'data' event. Fragments of any other type are ignored.
     *
     * Relies on closure state from the enclosing constructor: `buffer`
     * (pending fragments), `bufferSize` (bytes buffered) and `tagSize`
     * (total bytes of the tag currently being assembled).
     *
     * @param {Object} chunk a PES packet fragment from the elementary stream
     */
    this.push = function (chunk) {
      var tag, frameStart, frameSize, frame, i, frameHeader;

      if (chunk.type !== 'timed-metadata') {
        return;
      } // if data_alignment_indicator is set in the PES header,
      // we must have the start of a new ID3 tag. Assume anything
      // remaining in the buffer was malformed and throw it out


      if (chunk.dataAlignmentIndicator) {
        bufferSize = 0;
        buffer.length = 0;
      } // ignore events that don't look like ID3 data


      if (buffer.length === 0 && (chunk.data.length < 10 || chunk.data[0] !== 'I'.charCodeAt(0) || chunk.data[1] !== 'D'.charCodeAt(0) || chunk.data[2] !== '3'.charCodeAt(0))) {
        this.trigger('log', {
          level: 'warn',
          message: 'Skipping unrecognized metadata packet'
        });
        return;
      } // add this chunk to the data we've collected so far


      buffer.push(chunk);
      bufferSize += chunk.data.byteLength; // grab the size of the entire frame from the ID3 header

      if (buffer.length === 1) {
        // the frame size is transmitted as a 28-bit integer in the
        // last four bytes of the ID3 header.
        // The most significant bit of each byte is dropped and the
        // results concatenated to recover the actual value.
        tagSize = parseSyncSafeInteger$1(chunk.data.subarray(6, 10)); // ID3 reports the tag size excluding the header but it's more
        // convenient for our comparisons to include it

        tagSize += 10;
      } // if the entire frame has not arrived, wait for more data


      if (bufferSize < tagSize) {
        return;
      } // collect the entire frame so it can be parsed


      tag = {
        data: new Uint8Array(tagSize),
        frames: [],
        pts: buffer[0].pts,
        dts: buffer[0].dts
      };

      // reassemble the tag from the buffered fragments, consuming each
      // fragment as it is copied into tag.data
      for (i = 0; i < tagSize;) {
        tag.data.set(buffer[0].data.subarray(0, tagSize - i), i);
        i += buffer[0].data.byteLength;
        bufferSize -= buffer[0].data.byteLength;
        buffer.shift();
      } // find the start of the first frame and the end of the tag


      frameStart = 10;

      if (tag.data[5] & 0x40) {
        // advance the frame start past the extended header
        frameStart += 4; // header size field

        frameStart += parseSyncSafeInteger$1(tag.data.subarray(10, 14)); // clip any padding off the end

        tagSize -= parseSyncSafeInteger$1(tag.data.subarray(16, 20));
      } // parse one or more ID3 frames
      // http://id3.org/id3v2.3.0#ID3v2_frame_overview


      do {
        // determine the number of bytes in this frame
        frameSize = parseSyncSafeInteger$1(tag.data.subarray(frameStart + 4, frameStart + 8));

        if (frameSize < 1) {
          this.trigger('log', {
            level: 'warn',
            message: 'Malformed ID3 frame encountered. Skipping metadata parsing.'
          });
          return;
        }

        frameHeader = String.fromCharCode(tag.data[frameStart], tag.data[frameStart + 1], tag.data[frameStart + 2], tag.data[frameStart + 3]);
        frame = {
          id: frameHeader,
          data: tag.data.subarray(frameStart + 10, frameStart + frameSize + 10)
        };
        frame.key = frame.id;

        // run any frame-specific parser registered for this frame id
        if (tagParsers[frame.id]) {
          tagParsers[frame.id](frame); // handle the special PRIV frame used to indicate the start
          // time for raw AAC data

          if (frame.owner === 'com.apple.streaming.transportStreamTimestamp') {
            // rebuild the 33-bit MPEG-2 timestamp: the upper 31 bits are
            // assembled with 32-bit-safe bitwise operators, then the value is
            // multiplied by 4 and the final two bits added arithmetically to
            // avoid 32-bit overflow
            var d = frame.data,
                size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
            size *= 4;
            size += d[7] & 0x03;
            frame.timeStamp = size; // in raw AAC, all subsequent data will be timestamped based
            // on the value of this frame
            // we couldn't have known the appropriate pts and dts before
            // parsing this ID3 tag so set those values now

            if (tag.pts === undefined && tag.dts === undefined) {
              tag.pts = frame.timeStamp;
              tag.dts = frame.timeStamp;
            }

            this.trigger('timestamp', frame);
          }
        }

        tag.frames.push(frame);
        frameStart += 10; // advance past the frame header

        frameStart += frameSize; // advance past the frame body
      } while (frameStart < tagSize);

      this.trigger('data', tag);
    };
13689 };
13690
  // inherit event-dispatch behavior (on/trigger/pipe) from the base Stream
  _MetadataStream.prototype = new stream();
  var metadataStream = _MetadataStream;
  var TimestampRolloverStream = timestampRolloverStream.TimestampRolloverStream; // object types

  var _TransportPacketStream, _TransportParseStream, _ElementaryStream; // constants

  // every MPEG-2 transport stream packet is exactly 188 bytes long and
  // begins with the sync byte 0x47
  var MP2T_PACKET_LENGTH$1 = 188,
      // bytes
      SYNC_BYTE$1 = 0x47;
13701 /**
13702 * Splits an incoming stream of binary data into MPEG-2 Transport
13703 * Stream packets.
13704 */
13705
13706 _TransportPacketStream = function TransportPacketStream() {
13707 var buffer = new Uint8Array(MP2T_PACKET_LENGTH$1),
13708 bytesInBuffer = 0;
13709
13710 _TransportPacketStream.prototype.init.call(this); // Deliver new bytes to the stream.
13711
13712 /**
13713 * Split a stream of data into M2TS packets
13714 **/
13715
13716
13717 this.push = function (bytes) {
13718 var startIndex = 0,
13719 endIndex = MP2T_PACKET_LENGTH$1,
13720 everything; // If there are bytes remaining from the last segment, prepend them to the
13721 // bytes that were pushed in
13722
13723 if (bytesInBuffer) {
13724 everything = new Uint8Array(bytes.byteLength + bytesInBuffer);
13725 everything.set(buffer.subarray(0, bytesInBuffer));
13726 everything.set(bytes, bytesInBuffer);
13727 bytesInBuffer = 0;
13728 } else {
13729 everything = bytes;
13730 } // While we have enough data for a packet
13731
13732
13733 while (endIndex < everything.byteLength) {
13734 // Look for a pair of start and end sync bytes in the data..
13735 if (everything[startIndex] === SYNC_BYTE$1 && everything[endIndex] === SYNC_BYTE$1) {
13736 // We found a packet so emit it and jump one whole packet forward in
13737 // the stream
13738 this.trigger('data', everything.subarray(startIndex, endIndex));
13739 startIndex += MP2T_PACKET_LENGTH$1;
13740 endIndex += MP2T_PACKET_LENGTH$1;
13741 continue;
13742 } // If we get here, we have somehow become de-synchronized and we need to step
13743 // forward one byte at a time until we find a pair of sync bytes that denote
13744 // a packet
13745
13746
13747 startIndex++;
13748 endIndex++;
13749 } // If there was some data left over at the end of the segment that couldn't
13750 // possibly be a whole packet, keep it because it might be the start of a packet
13751 // that continues in the next segment
13752
13753
13754 if (startIndex < everything.byteLength) {
13755 buffer.set(everything.subarray(startIndex), 0);
13756 bytesInBuffer = everything.byteLength - startIndex;
13757 }
13758 };
13759 /**
13760 * Passes identified M2TS packets to the TransportParseStream to be parsed
13761 **/
13762
13763
13764 this.flush = function () {
13765 // If the buffer contains a whole packet when we are being flushed, emit it
13766 // and empty the buffer. Otherwise hold onto the data because it may be
13767 // important for decoding the next segment
13768 if (bytesInBuffer === MP2T_PACKET_LENGTH$1 && buffer[0] === SYNC_BYTE$1) {
13769 this.trigger('data', buffer);
13770 bytesInBuffer = 0;
13771 }
13772
13773 this.trigger('done');
13774 };
13775
13776 this.endTimeline = function () {
13777 this.flush();
13778 this.trigger('endedtimeline');
13779 };
13780
13781 this.reset = function () {
13782 bytesInBuffer = 0;
13783 this.trigger('reset');
13784 };
13785 };
13786
13787 _TransportPacketStream.prototype = new stream();
13788 /**
13789 * Accepts an MP2T TransportPacketStream and emits data events with parsed
13790 * forms of the individual transport stream packets.
13791 */
13792
  /**
   * Accepts 188-byte M2TS packets, parses PAT/PMT program information out of
   * them, and re-emits each packet annotated with its pid, stream type and
   * payload ('data' events). PES packets seen before the first PMT are
   * queued and replayed once the PMT arrives.
   */
  _TransportParseStream = function TransportParseStream() {
    var parsePsi, parsePat, parsePmt, self;

    _TransportParseStream.prototype.init.call(this);

    self = this;
    this.packetsWaitingForPmt = [];
    this.programMapTable = undefined;

    // Dispatch a program-specific-information payload to the PAT or PMT
    // parser, skipping the pointer field when a new section starts here.
    parsePsi = function parsePsi(payload, psi) {
      var offset = 0; // PSI packets may be split into multiple sections and those
      // sections may be split into multiple packets. If a PSI
      // section starts in this packet, the payload_unit_start_indicator
      // will be true and the first byte of the payload will indicate
      // the offset from the current position to the start of the
      // section.

      if (psi.payloadUnitStartIndicator) {
        offset += payload[offset] + 1;
      }

      if (psi.type === 'pat') {
        parsePat(payload.subarray(offset), psi);
      } else {
        parsePmt(payload.subarray(offset), psi);
      }
    };

    // Extract the PMT pid from a Program Association Table payload. Only the
    // first PMT entry is read.
    parsePat = function parsePat(payload, pat) {
      pat.section_number = payload[7]; // eslint-disable-line camelcase

      pat.last_section_number = payload[8]; // eslint-disable-line camelcase
      // skip the PSI header and parse the first PMT entry

      self.pmtPid = (payload[10] & 0x1F) << 8 | payload[11];
      pat.pmtPid = self.pmtPid;
    };
    /**
     * Parse out the relevant fields of a Program Map Table (PMT).
     * @param payload {Uint8Array} the PMT-specific portion of an MP2T
     * packet. The first byte in this array should be the table_id
     * field.
     * @param pmt {object} the object that should be decorated with
     * fields parsed from the PMT.
     */


    parsePmt = function parsePmt(payload, pmt) {
      var sectionLength, tableEnd, programInfoLength, offset; // PMTs can be sent ahead of the time when they should actually
      // take effect. We don't believe this should ever be the case
      // for HLS but we'll ignore "forward" PMT declarations if we see
      // them. Future PMT declarations have the current_next_indicator
      // set to zero.

      if (!(payload[5] & 0x01)) {
        return;
      } // overwrite any existing program map table


      self.programMapTable = {
        video: null,
        audio: null,
        'timed-metadata': {}
      }; // the mapping table ends at the end of the current section

      sectionLength = (payload[1] & 0x0f) << 8 | payload[2];
      tableEnd = 3 + sectionLength - 4; // to determine where the table is, we have to figure out how
      // long the program info descriptors are

      programInfoLength = (payload[10] & 0x0f) << 8 | payload[11]; // advance the offset to the first entry in the mapping table

      offset = 12 + programInfoLength;

      while (offset < tableEnd) {
        var streamType = payload[offset];
        var pid = (payload[offset + 1] & 0x1F) << 8 | payload[offset + 2]; // only map a single elementary_pid for audio and video stream types
        // TODO: should this be done for metadata too? for now maintain behavior of
        // multiple metadata streams

        if (streamType === streamTypes.H264_STREAM_TYPE && self.programMapTable.video === null) {
          self.programMapTable.video = pid;
        } else if (streamType === streamTypes.ADTS_STREAM_TYPE && self.programMapTable.audio === null) {
          self.programMapTable.audio = pid;
        } else if (streamType === streamTypes.METADATA_STREAM_TYPE) {
          // map pid to stream type for metadata streams
          self.programMapTable['timed-metadata'][pid] = streamType;
        } // move to the next table entry
        // skip past the elementary stream descriptors, if present


        offset += ((payload[offset + 3] & 0x0F) << 8 | payload[offset + 4]) + 5;
      } // record the map on the packet as well


      pmt.programMapTable = self.programMapTable;
    };
    /**
     * Deliver a new MP2T packet to the next stream in the pipeline.
     */


    this.push = function (packet) {
      var result = {},
          offset = 4;
      result.payloadUnitStartIndicator = !!(packet[1] & 0x40); // pid is a 13-bit field starting at the last bit of packet[1]

      result.pid = packet[1] & 0x1f;
      result.pid <<= 8;
      result.pid |= packet[2]; // if an adaption field is present, its length is specified by the
      // fifth byte of the TS packet header. The adaptation field is
      // used to add stuffing to PES packets that don't fill a complete
      // TS packet, and to specify some forms of timing and control data
      // that we do not currently use.

      if ((packet[3] & 0x30) >>> 4 > 0x01) {
        offset += packet[offset] + 1;
      } // parse the rest of the packet based on the type


      if (result.pid === 0) {
        result.type = 'pat';
        parsePsi(packet.subarray(offset), result);
        this.trigger('data', result);
      } else if (result.pid === this.pmtPid) {
        result.type = 'pmt';
        parsePsi(packet.subarray(offset), result);
        this.trigger('data', result); // if there are any packets waiting for a PMT to be found, process them now

        while (this.packetsWaitingForPmt.length) {
          this.processPes_.apply(this, this.packetsWaitingForPmt.shift());
        }
      } else if (this.programMapTable === undefined) {
        // When we have not seen a PMT yet, defer further processing of
        // PES packets until one has been parsed
        this.packetsWaitingForPmt.push([packet, offset, result]);
      } else {
        this.processPes_(packet, offset, result);
      }
    };

    /**
     * Annotate a PES packet with its stream type (looked up by pid in the
     * program map table) and emit it as a 'data' event.
     */
    this.processPes_ = function (packet, offset, result) {
      // set the appropriate stream type
      if (result.pid === this.programMapTable.video) {
        result.streamType = streamTypes.H264_STREAM_TYPE;
      } else if (result.pid === this.programMapTable.audio) {
        result.streamType = streamTypes.ADTS_STREAM_TYPE;
      } else {
        // if not video or audio, it is timed-metadata or unknown
        // if unknown, streamType will be undefined
        result.streamType = this.programMapTable['timed-metadata'][result.pid];
      }

      result.type = 'pes';
      result.data = packet.subarray(offset);
      this.trigger('data', result);
    };
  };

  _TransportParseStream.prototype = new stream();
  _TransportParseStream.STREAM_TYPES = {
    h264: 0x1b,
    adts: 0x0f
  };
13956 /**
13957 * Reconsistutes program elementary stream (PES) packets from parsed
13958 * transport stream packets. That is, if you pipe an
13959 * mp2t.TransportParseStream into a mp2t.ElementaryStream, the output
13960 * events will be events which capture the bytes for individual PES
13961 * packets plus relevant metadata that has been extracted from the
13962 * container.
13963 */
13964
  _ElementaryStream = function ElementaryStream() {
    var self = this,
        // whether a PMT was emitted during the current segment (reset on flush)
        segmentHadPmt = false,
        // PES packet fragments
        video = {
          data: [],
          size: 0
        },
        audio = {
          data: [],
          size: 0
        },
        timedMetadata = {
          data: [],
          size: 0
        },
        // most recently seen program map table, kept so flush() can re-emit
        // track metadata for segments that carried no PMT of their own
        programMapTable,
        // Parse a reassembled PES packet header into `pes` (packetLength,
        // dataAlignmentIndicator, pts/dts, payload data). Bails out leaving
        // pes.data empty when the payload does not begin with the PES start
        // code prefix 0x000001.
        parsePes = function parsePes(payload, pes) {
          var ptsDtsFlags;
          var startPrefix = payload[0] << 16 | payload[1] << 8 | payload[2]; // default to an empty array

          pes.data = new Uint8Array(); // In certain live streams, the start of a TS fragment has ts packets
          // that are frame data that is continuing from the previous fragment. This
          // is to check that the pes data is the start of a new pes payload

          if (startPrefix !== 1) {
            return;
          } // get the packet length, this will be 0 for video


          pes.packetLength = 6 + (payload[4] << 8 | payload[5]); // find out if this packets starts a new keyframe

          pes.dataAlignmentIndicator = (payload[6] & 0x04) !== 0; // PES packets may be annotated with a PTS value, or a PTS value
          // and a DTS value. Determine what combination of values is
          // available to work with.

          ptsDtsFlags = payload[7]; // PTS and DTS are normally stored as a 33-bit number. Javascript
          // performs all bitwise operations on 32-bit integers but javascript
          // supports a much greater range (52-bits) of integer using standard
          // mathematical operations.
          // We construct a 31-bit value using bitwise operators over the 31
          // most significant bits and then multiply by 4 (equal to a left-shift
          // of 2) before we add the final 2 least significant bits of the
          // timestamp (equal to an OR.)

          if (ptsDtsFlags & 0xC0) {
            // the PTS and DTS are not written out directly. For information
            // on how they are encoded, see
            // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
            pes.pts = (payload[9] & 0x0E) << 27 | (payload[10] & 0xFF) << 20 | (payload[11] & 0xFE) << 12 | (payload[12] & 0xFF) << 5 | (payload[13] & 0xFE) >>> 3;
            pes.pts *= 4; // Left shift by 2

            pes.pts += (payload[13] & 0x06) >>> 1; // OR by the two LSBs

            pes.dts = pes.pts;

            if (ptsDtsFlags & 0x40) {
              pes.dts = (payload[14] & 0x0E) << 27 | (payload[15] & 0xFF) << 20 | (payload[16] & 0xFE) << 12 | (payload[17] & 0xFF) << 5 | (payload[18] & 0xFE) >>> 3;
              pes.dts *= 4; // Left shift by 2

              pes.dts += (payload[18] & 0x06) >>> 1; // OR by the two LSBs
            }
          } // the data section starts immediately after the PES header.
          // pes_header_data_length specifies the number of header bytes
          // that follow the last byte of the field.


          pes.data = payload.subarray(9 + payload[8]);
        },

    /**
     * Pass completely parsed PES packets to the next stream in the pipeline
     **/
    flushStream = function flushStream(stream, type, forceFlush) {
      var packetData = new Uint8Array(stream.size),
          event = {
            type: type
          },
          i = 0,
          offset = 0,
          packetFlushable = false,
          fragment; // do nothing if there is not enough buffered data for a complete
      // PES header

      if (!stream.data.length || stream.size < 9) {
        return;
      }

      event.trackId = stream.data[0].pid; // reassemble the packet

      for (i = 0; i < stream.data.length; i++) {
        fragment = stream.data[i];
        packetData.set(fragment.data, offset);
        offset += fragment.data.byteLength;
      } // parse assembled packet's PES header


      parsePes(packetData, event); // non-video PES packets MUST have a non-zero PES_packet_length
      // check that there is enough stream data to fill the packet

      packetFlushable = type === 'video' || event.packetLength <= stream.size; // flush pending packets if the conditions are right

      if (forceFlush || packetFlushable) {
        stream.size = 0;
        stream.data.length = 0;
      } // only emit packets that are complete. this is to avoid assembling
      // incomplete PES packets due to poor segmentation


      if (packetFlushable) {
        self.trigger('data', event);
      }
    };

    _ElementaryStream.prototype.init.call(this);
    /**
     * Identifies M2TS packet types and parses PES packets using metadata
     * parsed from the PMT
     **/


    this.push = function (data) {
      // dispatch on the packet type; unknown PES stream types are dropped
      ({
        pat: function pat() {// we have to wait for the PMT to arrive as well before we
          // have any meaningful metadata
        },
        pes: function pes() {
          var stream, streamType;

          switch (data.streamType) {
            case streamTypes.H264_STREAM_TYPE:
              stream = video;
              streamType = 'video';
              break;

            case streamTypes.ADTS_STREAM_TYPE:
              stream = audio;
              streamType = 'audio';
              break;

            case streamTypes.METADATA_STREAM_TYPE:
              stream = timedMetadata;
              streamType = 'timed-metadata';
              break;

            default:
              // ignore unknown stream types
              return;
          } // if a new packet is starting, we can flush the completed
          // packet


          if (data.payloadUnitStartIndicator) {
            flushStream(stream, streamType, true);
          } // buffer this fragment until we are sure we've received the
          // complete payload


          stream.data.push(data);
          stream.size += data.data.byteLength;
        },
        pmt: function pmt() {
          var event = {
            type: 'metadata',
            tracks: []
          };
          programMapTable = data.programMapTable; // translate audio and video streams to tracks

          if (programMapTable.video !== null) {
            event.tracks.push({
              timelineStartInfo: {
                baseMediaDecodeTime: 0
              },
              id: +programMapTable.video,
              codec: 'avc',
              type: 'video'
            });
          }

          if (programMapTable.audio !== null) {
            event.tracks.push({
              timelineStartInfo: {
                baseMediaDecodeTime: 0
              },
              id: +programMapTable.audio,
              codec: 'adts',
              type: 'audio'
            });
          }

          segmentHadPmt = true;
          self.trigger('data', event);
        }
      })[data.type]();
    };

    // drop buffered video/audio fragments (timed-metadata is intentionally
    // left untouched, matching upstream behavior)
    this.reset = function () {
      video.size = 0;
      video.data.length = 0;
      audio.size = 0;
      audio.data.length = 0;
      this.trigger('reset');
    };
    /**
     * Flush any remaining input. Video PES packets may be of variable
     * length. Normally, the start of a new video packet can trigger the
     * finalization of the previous packet. That is not possible if no
     * more video is forthcoming, however. In that case, some other
     * mechanism (like the end of the file) has to be employed. When it is
     * clear that no additional data is forthcoming, calling this method
     * will flush the buffered packets.
     */


    this.flushStreams_ = function () {
      // !!THIS ORDER IS IMPORTANT!!
      // video first then audio
      flushStream(video, 'video');
      flushStream(audio, 'audio');
      flushStream(timedMetadata, 'timed-metadata');
    };

    this.flush = function () {
      // if on flush we haven't had a pmt emitted
      // and we have a pmt to emit. emit the pmt
      // so that we trigger a trackinfo downstream.
      if (!segmentHadPmt && programMapTable) {
        var pmt = {
          type: 'metadata',
          tracks: []
        }; // translate audio and video streams to tracks

        if (programMapTable.video !== null) {
          pmt.tracks.push({
            timelineStartInfo: {
              baseMediaDecodeTime: 0
            },
            id: +programMapTable.video,
            codec: 'avc',
            type: 'video'
          });
        }

        if (programMapTable.audio !== null) {
          pmt.tracks.push({
            timelineStartInfo: {
              baseMediaDecodeTime: 0
            },
            id: +programMapTable.audio,
            codec: 'adts',
            type: 'audio'
          });
        }

        self.trigger('data', pmt);
      }

      segmentHadPmt = false;
      this.flushStreams_();
      this.trigger('done');
    };
  };

  _ElementaryStream.prototype = new stream();
  // public m2ts namespace: the transport-stream pipeline stages plus the
  // caption and metadata streams defined earlier in this bundle
  var m2ts = {
    PAT_PID: 0x0000,
    MP2T_PACKET_LENGTH: MP2T_PACKET_LENGTH$1,
    TransportPacketStream: _TransportPacketStream,
    TransportParseStream: _TransportParseStream,
    ElementaryStream: _ElementaryStream,
    TimestampRolloverStream: TimestampRolloverStream,
    CaptionStream: captionStream.CaptionStream,
    Cea608Stream: captionStream.Cea608Stream,
    Cea708Stream: captionStream.Cea708Stream,
    MetadataStream: metadataStream
  };

  // re-export every own stream-type constant on the m2ts namespace as well
  for (var type in streamTypes) {
    if (streamTypes.hasOwnProperty(type)) {
      m2ts[type] = streamTypes[type];
    }
  }

  var m2ts_1 = m2ts;
  var ONE_SECOND_IN_TS$2 = clock.ONE_SECOND_IN_TS;

  var _AdtsStream;

  // sample rates indexed by the 4-bit sampling_frequency_index field of an
  // ADTS header
  var ADTS_SAMPLING_FREQUENCIES$1 = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
14254 /*
14255 * Accepts a ElementaryStream and emits data events with parsed
14256 * AAC Audio Frames of the individual packets. Input audio in ADTS
14257 * format is unpacked and re-emitted as AAC frames.
14258 *
14259 * @see http://wiki.multimedia.cx/index.php?title=ADTS
14260 * @see http://wiki.multimedia.cx/?title=Understanding_AAC
14261 */
14262
  /**
   * Accepts 'audio' PES packets, locates ADTS syncwords in them (buffering
   * partial frames across packet boundaries), and emits one 'data' event per
   * complete AAC frame with the header stripped and timing/codec fields
   * attached.
   * @param {boolean} handlePartialSegments when falsy, the running frame
   * counter is reset on every push
   */
  _AdtsStream = function AdtsStream(handlePartialSegments) {
    var buffer,
        frameNum = 0;

    _AdtsStream.prototype.init.call(this);

    // emit a warn-level log event describing a byte range that was skipped
    // while searching for an ADTS syncword
    this.skipWarn_ = function (start, end) {
      this.trigger('log', {
        level: 'warn',
        message: "adts skiping bytes " + start + " to " + end + " in frame " + frameNum + " outside syncword"
      });
    };

    this.push = function (packet) {
      var i = 0,
          frameLength,
          protectionSkipBytes,
          oldBuffer,
          sampleCount,
          adtsFrameDuration;

      if (!handlePartialSegments) {
        frameNum = 0;
      }

      if (packet.type !== 'audio') {
        // ignore non-audio data
        return;
      } // Prepend any data in the buffer to the input data so that we can parse
      // aac frames the cross a PES packet boundary


      if (buffer && buffer.length) {
        oldBuffer = buffer;
        buffer = new Uint8Array(oldBuffer.byteLength + packet.data.byteLength);
        buffer.set(oldBuffer);
        buffer.set(packet.data, oldBuffer.byteLength);
      } else {
        buffer = packet.data;
      } // unpack any ADTS frames which have been fully received
      // for details on the ADTS header, see http://wiki.multimedia.cx/index.php?title=ADTS


      var skip; // We use i + 7 here because we want to be able to parse the entire header.
      // If we don't have enough bytes to do that, then we definitely won't have a full frame.

      while (i + 7 < buffer.length) {
        // Look for the start of an ADTS header..
        if (buffer[i] !== 0xFF || (buffer[i + 1] & 0xF6) !== 0xF0) {
          if (typeof skip !== 'number') {
            skip = i;
          } // If a valid header was not found, jump one forward and attempt to
          // find a valid ADTS header starting at the next byte


          i++;
          continue;
        }

        if (typeof skip === 'number') {
          this.skipWarn_(skip, i);
          skip = null;
        } // The protection skip bit tells us if we have 2 bytes of CRC data at the
        // end of the ADTS header


        protectionSkipBytes = (~buffer[i + 1] & 0x01) * 2; // Frame length is a 13 bit integer starting 16 bits from the
        // end of the sync sequence
        // NOTE: frame length includes the size of the header

        frameLength = (buffer[i + 3] & 0x03) << 11 | buffer[i + 4] << 3 | (buffer[i + 5] & 0xe0) >> 5;
        sampleCount = ((buffer[i + 6] & 0x03) + 1) * 1024;
        adtsFrameDuration = sampleCount * ONE_SECOND_IN_TS$2 / ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2]; // If we don't have enough data to actually finish this ADTS frame,
        // then we have to wait for more data

        if (buffer.byteLength - i < frameLength) {
          break;
        } // Otherwise, deliver the complete AAC frame


        this.trigger('data', {
          pts: packet.pts + frameNum * adtsFrameDuration,
          dts: packet.dts + frameNum * adtsFrameDuration,
          sampleCount: sampleCount,
          audioobjecttype: (buffer[i + 2] >>> 6 & 0x03) + 1,
          channelcount: (buffer[i + 2] & 1) << 2 | (buffer[i + 3] & 0xc0) >>> 6,
          samplerate: ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2],
          samplingfrequencyindex: (buffer[i + 2] & 0x3c) >>> 2,
          // assume ISO/IEC 14496-12 AudioSampleEntry default of 16
          samplesize: 16,
          // data is the frame without it's header
          data: buffer.subarray(i + 7 + protectionSkipBytes, i + frameLength)
        });
        frameNum++;
        i += frameLength;
      }

      // report any unconsumed garbage at the tail of the buffer
      if (typeof skip === 'number') {
        this.skipWarn_(skip, i);
        skip = null;
      } // remove processed bytes from the buffer.


      buffer = buffer.subarray(i);
    };

    this.flush = function () {
      frameNum = 0;
      this.trigger('done');
    };

    this.reset = function () {
      buffer = void 0;
      this.trigger('reset');
    };

    this.endTimeline = function () {
      buffer = void 0;
      this.trigger('endedtimeline');
    };
  };

  _AdtsStream.prototype = new stream();
  var adts = _AdtsStream;
14387 /**
14388 * mux.js
14389 *
14390 * Copyright (c) Brightcove
14391 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
14392 */
14393
14394 var ExpGolomb;
14395 /**
14396 * Parser for exponential Golomb codes, a variable-bitwidth number encoding
14397 * scheme used by h264.
14398 */
14399
14400 ExpGolomb = function ExpGolomb(workingData) {
14401 var // the number of bytes left to examine in workingData
14402 workingBytesAvailable = workingData.byteLength,
14403 // the current word being examined
14404 workingWord = 0,
14405 // :uint
14406 // the number of bits left to examine in the current word
14407 workingBitsAvailable = 0; // :uint;
14408 // ():uint
14409
14410 this.length = function () {
14411 return 8 * workingBytesAvailable;
14412 }; // ():uint
14413
14414
14415 this.bitsAvailable = function () {
14416 return 8 * workingBytesAvailable + workingBitsAvailable;
14417 }; // ():void
14418
14419
14420 this.loadWord = function () {
14421 var position = workingData.byteLength - workingBytesAvailable,
14422 workingBytes = new Uint8Array(4),
14423 availableBytes = Math.min(4, workingBytesAvailable);
14424
14425 if (availableBytes === 0) {
14426 throw new Error('no bytes available');
14427 }
14428
14429 workingBytes.set(workingData.subarray(position, position + availableBytes));
14430 workingWord = new DataView(workingBytes.buffer).getUint32(0); // track the amount of workingData that has been processed
14431
14432 workingBitsAvailable = availableBytes * 8;
14433 workingBytesAvailable -= availableBytes;
14434 }; // (count:int):void
14435
14436
14437 this.skipBits = function (count) {
14438 var skipBytes; // :int
14439
14440 if (workingBitsAvailable > count) {
14441 workingWord <<= count;
14442 workingBitsAvailable -= count;
14443 } else {
14444 count -= workingBitsAvailable;
14445 skipBytes = Math.floor(count / 8);
14446 count -= skipBytes * 8;
14447 workingBytesAvailable -= skipBytes;
14448 this.loadWord();
14449 workingWord <<= count;
14450 workingBitsAvailable -= count;
14451 }
14452 }; // (size:int):uint
14453
14454
14455 this.readBits = function (size) {
14456 var bits = Math.min(workingBitsAvailable, size),
14457 // :uint
14458 valu = workingWord >>> 32 - bits; // :uint
14459 // if size > 31, handle error
14460
14461 workingBitsAvailable -= bits;
14462
14463 if (workingBitsAvailable > 0) {
14464 workingWord <<= bits;
14465 } else if (workingBytesAvailable > 0) {
14466 this.loadWord();
14467 }
14468
14469 bits = size - bits;
14470
14471 if (bits > 0) {
14472 return valu << bits | this.readBits(bits);
14473 }
14474
14475 return valu;
14476 }; // ():uint
14477
14478
14479 this.skipLeadingZeros = function () {
14480 var leadingZeroCount; // :uint
14481
14482 for (leadingZeroCount = 0; leadingZeroCount < workingBitsAvailable; ++leadingZeroCount) {
14483 if ((workingWord & 0x80000000 >>> leadingZeroCount) !== 0) {
14484 // the first bit of working word is 1
14485 workingWord <<= leadingZeroCount;
14486 workingBitsAvailable -= leadingZeroCount;
14487 return leadingZeroCount;
14488 }
14489 } // we exhausted workingWord and still have not found a 1
14490
14491
14492 this.loadWord();
14493 return leadingZeroCount + this.skipLeadingZeros();
14494 }; // ():void
14495
14496
14497 this.skipUnsignedExpGolomb = function () {
14498 this.skipBits(1 + this.skipLeadingZeros());
14499 }; // ():void
14500
14501
14502 this.skipExpGolomb = function () {
14503 this.skipBits(1 + this.skipLeadingZeros());
14504 }; // ():uint
14505
14506
14507 this.readUnsignedExpGolomb = function () {
14508 var clz = this.skipLeadingZeros(); // :uint
14509
14510 return this.readBits(clz + 1) - 1;
14511 }; // ():int
14512
14513
14514 this.readExpGolomb = function () {
14515 var valu = this.readUnsignedExpGolomb(); // :int
14516
14517 if (0x01 & valu) {
14518 // the number is odd if the low order bit is set
14519 return 1 + valu >>> 1; // add 1 to make it even, and divide by 2
14520 }
14521
14522 return -1 * (valu >>> 1); // divide by two then make it negative
14523 }; // Some convenience functions
14524 // :Boolean
14525
14526
14527 this.readBoolean = function () {
14528 return this.readBits(1) === 1;
14529 }; // ():int
14530
14531
14532 this.readUnsignedByte = function () {
14533 return this.readBits(8);
14534 };
14535
14536 this.loadWord();
14537 };
14538
14539 var expGolomb = ExpGolomb;
14540
  // forward declarations for the H.264 pipeline stages assigned below
  var _H264Stream, _NalByteStream;

  var PROFILES_WITH_OPTIONAL_SPS_DATA;
14544 /**
14545 * Accepts a NAL unit byte stream and unpacks the embedded NAL units.
14546 */
14547
  /**
   * Accepts an Annex B NAL unit byte stream and unpacks the embedded NAL
   * units, triggering one 'data' event per complete unit found. Incomplete
   * trailing bytes are buffered across push() calls.
   */
  _NalByteStream = function NalByteStream() {
    var syncPoint = 0,
        i,
        buffer;

    _NalByteStream.prototype.init.call(this);
    /*
     * Scans a byte stream and triggers a data event with the NAL units found.
     * @param {Object} data Event received from H264Stream
     * @param {Uint8Array} data.data The h264 byte stream to be scanned
     *
     * @see H264Stream.push
     */

    this.push = function (data) {
      var swapBuffer;

      // append the new bytes to any partial unit buffered from a prior push
      if (!buffer) {
        buffer = data.data;
      } else {
        swapBuffer = new Uint8Array(buffer.byteLength + data.data.byteLength);
        swapBuffer.set(buffer);
        swapBuffer.set(data.data, buffer.byteLength);
        buffer = swapBuffer;
      }

      var len = buffer.byteLength; // Rec. ITU-T H.264, Annex B
      // scan for NAL unit boundaries
      // a match looks like this:
      // 0 0 1 .. NAL .. 0 0 1
      // ^ sync point ^ i
      // or this:
      // 0 0 1 .. NAL .. 0 0 0
      // ^ sync point ^ i
      // advance the sync point to a NAL start, if necessary

      for (; syncPoint < len - 3; syncPoint++) {
        if (buffer[syncPoint + 2] === 1) {
          // the sync point is properly aligned: skip the 3-byte start code
          // plus two bytes that cannot complete the next start code
          i = syncPoint + 5;
          break;
        }
      }

      while (i < len) {
        // look at the current byte to determine if we've hit the end of
        // a NAL unit boundary
        switch (buffer[i]) {
          case 0:
            // skip past non-sync sequences
            if (buffer[i - 1] !== 0) {
              i += 2;
              break;
            } else if (buffer[i - 2] !== 0) {
              i++;
              break;
            } // deliver the NAL unit if it isn't empty


            if (syncPoint + 3 !== i - 2) {
              this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
            } // drop trailing zeroes


            do {
              i++;
            } while (buffer[i] !== 1 && i < len);

            syncPoint = i - 2;
            i += 3;
            break;

          case 1:
            // skip past non-sync sequences
            if (buffer[i - 1] !== 0 || buffer[i - 2] !== 0) {
              i += 3;
              break;
            } // deliver the NAL unit


            this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
            syncPoint = i - 2;
            i += 3;
            break;

          default:
            // the current byte isn't a one or zero, so it cannot be part
            // of a sync sequence
            i += 3;
            break;
        }
      } // filter out the NAL units that were delivered


      // keep only the unfinished tail of the buffer for the next push
      buffer = buffer.subarray(syncPoint);
      i -= syncPoint;
      syncPoint = 0;
    };

    // discard all buffered state and notify listeners
    this.reset = function () {
      buffer = null;
      syncPoint = 0;
      this.trigger('reset');
    };

    this.flush = function () {
      // deliver the last buffered NAL unit
      if (buffer && buffer.byteLength > 3) {
        this.trigger('data', buffer.subarray(syncPoint + 3));
      } // reset the stream state


      buffer = null;
      syncPoint = 0;
      this.trigger('done');
    };

    // flush any remaining bytes, then signal that the timeline has ended
    this.endTimeline = function () {
      this.flush();
      this.trigger('endedtimeline');
    };
  };
14671
  // NalByteStream inherits the event facilities of the base stream
  _NalByteStream.prototype = new stream(); // values of profile_idc that indicate additional fields are included in the SPS
  // see Recommendation ITU-T H.264 (4/2013),
  // 7.3.2.1.1 Sequence parameter set data syntax

  PROFILES_WITH_OPTIONAL_SPS_DATA = {
    100: true,
    110: true,
    122: true,
    244: true,
    44: true,
    83: true,
    86: true,
    118: true,
    128: true,
    // TODO: the three profiles below don't
    // appear to have sps data in the specification anymore?
    138: true,
    139: true,
    134: true
  };
14692 /**
14693 * Accepts input from a ElementaryStream and produces H.264 NAL unit data
14694 * events.
14695 */
14696
  /**
   * Accepts input from an ElementaryStream and produces H.264 NAL unit data
   * events. Each NAL unit is classified by its type code; SEI and SPS
   * payloads are additionally unescaped, and SPS units are parsed into a
   * codec configuration (dimensions, profile, level, SAR).
   */
  _H264Stream = function H264Stream() {
    var nalByteStream = new _NalByteStream(),
        self,
        trackId,
        currentPts,
        currentDts,
        discardEmulationPreventionBytes,
        readSequenceParameterSet,
        skipScalingList;

    _H264Stream.prototype.init.call(this);

    self = this;
    /*
     * Pushes a packet from a stream onto the NalByteStream
     *
     * @param {Object} packet - A packet received from a stream
     * @param {Uint8Array} packet.data - The raw bytes of the packet
     * @param {Number} packet.dts - Decode timestamp of the packet
     * @param {Number} packet.pts - Presentation timestamp of the packet
     * @param {Number} packet.trackId - The id of the h264 track this packet came from
     * @param {('video'|'audio')} packet.type - The type of packet
     *
     */

    this.push = function (packet) {
      if (packet.type !== 'video') {
        return;
      }

      // remember the packet's identity/timing so the NAL units that
      // nalByteStream emits synchronously below can be stamped with it
      trackId = packet.trackId;
      currentPts = packet.pts;
      currentDts = packet.dts;
      nalByteStream.push(packet);
    };
    /*
     * Identify NAL unit types and pass on the NALU, trackId, presentation and decode timestamps
     * for the NALUs to the next stream component.
     * Also, preprocess caption and sequence parameter NALUs.
     *
     * @param {Uint8Array} data - A NAL unit identified by `NalByteStream.push`
     * @see NalByteStream.push
     */

    nalByteStream.on('data', function (data) {
      var event = {
        trackId: trackId,
        pts: currentPts,
        dts: currentDts,
        data: data,
        // the NAL unit type is the low 5 bits of the first payload byte
        nalUnitTypeCode: data[0] & 0x1f
      };

      switch (event.nalUnitTypeCode) {
        case 0x05:
          event.nalUnitType = 'slice_layer_without_partitioning_rbsp_idr';
          break;

        case 0x06:
          event.nalUnitType = 'sei_rbsp';
          event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
          break;

        case 0x07:
          event.nalUnitType = 'seq_parameter_set_rbsp';
          event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
          event.config = readSequenceParameterSet(event.escapedRBSP);
          break;

        case 0x08:
          event.nalUnitType = 'pic_parameter_set_rbsp';
          break;

        case 0x09:
          event.nalUnitType = 'access_unit_delimiter_rbsp';
          break;
      } // This triggers data on the H264Stream


      self.trigger('data', event);
    });
    // forward the lifecycle events of the inner byte stream
    nalByteStream.on('done', function () {
      self.trigger('done');
    });
    nalByteStream.on('partialdone', function () {
      self.trigger('partialdone');
    });
    nalByteStream.on('reset', function () {
      self.trigger('reset');
    });
    nalByteStream.on('endedtimeline', function () {
      self.trigger('endedtimeline');
    });

    this.flush = function () {
      nalByteStream.flush();
    };

    this.partialFlush = function () {
      nalByteStream.partialFlush();
    };

    this.reset = function () {
      nalByteStream.reset();
    };

    this.endTimeline = function () {
      nalByteStream.endTimeline();
    };
    /**
     * Advance the ExpGolomb decoder past a scaling list. The scaling
     * list is optionally transmitted as part of a sequence parameter
     * set and is not relevant to transmuxing.
     * @param count {number} the number of entries in this scaling list
     * @param expGolombDecoder {object} an ExpGolomb pointed to the
     * start of a scaling list
     * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
     */

    skipScalingList = function skipScalingList(count, expGolombDecoder) {
      var lastScale = 8,
          nextScale = 8,
          j,
          deltaScale;

      for (j = 0; j < count; j++) {
        if (nextScale !== 0) {
          deltaScale = expGolombDecoder.readExpGolomb();
          nextScale = (lastScale + deltaScale + 256) % 256;
        }

        lastScale = nextScale === 0 ? lastScale : nextScale;
      }
    };
    /**
     * Expunge any "Emulation Prevention" bytes from a "Raw Byte
     * Sequence Payload"
     * @param data {Uint8Array} the bytes of a RBSP from a NAL
     * unit
     * @return {Uint8Array} the RBSP without any Emulation
     * Prevention Bytes
     */

    discardEmulationPreventionBytes = function discardEmulationPreventionBytes(data) {
      var length = data.byteLength,
          emulationPreventionBytesPositions = [],
          i = 1,
          newLength,
          newData; // Find all `Emulation Prevention Bytes`

      // an emulation prevention byte is the 0x03 in a 0x00 0x00 0x03 run
      while (i < length - 2) {
        if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
          emulationPreventionBytesPositions.push(i + 2);
          i += 2;
        } else {
          i++;
        }
      } // If no Emulation Prevention Bytes were found just return the original
      // array


      if (emulationPreventionBytesPositions.length === 0) {
        return data;
      } // Create a new array to hold the NAL unit data


      newLength = length - emulationPreventionBytesPositions.length;
      newData = new Uint8Array(newLength);
      var sourceIndex = 0;

      for (i = 0; i < newLength; sourceIndex++, i++) {
        if (sourceIndex === emulationPreventionBytesPositions[0]) {
          // Skip this byte
          sourceIndex++; // Remove this position index

          emulationPreventionBytesPositions.shift();
        }

        newData[i] = data[sourceIndex];
      }

      return newData;
    };
    /**
     * Read a sequence parameter set and return some interesting video
     * properties. A sequence parameter set is the H264 metadata that
     * describes the properties of upcoming video frames.
     * @param data {Uint8Array} the bytes of a sequence parameter set
     * @return {object} an object with configuration parsed from the
     * sequence parameter set, including the dimensions of the
     * associated video frames.
     */

    readSequenceParameterSet = function readSequenceParameterSet(data) {
      var frameCropLeftOffset = 0,
          frameCropRightOffset = 0,
          frameCropTopOffset = 0,
          frameCropBottomOffset = 0,
          expGolombDecoder,
          profileIdc,
          levelIdc,
          profileCompatibility,
          chromaFormatIdc,
          picOrderCntType,
          numRefFramesInPicOrderCntCycle,
          picWidthInMbsMinus1,
          picHeightInMapUnitsMinus1,
          frameMbsOnlyFlag,
          scalingListCount,
          sarRatio = [1, 1],
          aspectRatioIdc,
          i;
      expGolombDecoder = new expGolomb(data);
      profileIdc = expGolombDecoder.readUnsignedByte(); // profile_idc

      profileCompatibility = expGolombDecoder.readUnsignedByte(); // constraint_set[0-5]_flag

      levelIdc = expGolombDecoder.readUnsignedByte(); // level_idc u(8)

      expGolombDecoder.skipUnsignedExpGolomb(); // seq_parameter_set_id
      // some profiles have more optional data we don't need

      if (PROFILES_WITH_OPTIONAL_SPS_DATA[profileIdc]) {
        chromaFormatIdc = expGolombDecoder.readUnsignedExpGolomb();

        if (chromaFormatIdc === 3) {
          expGolombDecoder.skipBits(1); // separate_colour_plane_flag
        }

        expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_luma_minus8

        expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_chroma_minus8

        expGolombDecoder.skipBits(1); // qpprime_y_zero_transform_bypass_flag

        if (expGolombDecoder.readBoolean()) {
          // seq_scaling_matrix_present_flag
          scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;

          for (i = 0; i < scalingListCount; i++) {
            if (expGolombDecoder.readBoolean()) {
              // seq_scaling_list_present_flag[ i ]
              if (i < 6) {
                skipScalingList(16, expGolombDecoder);
              } else {
                skipScalingList(64, expGolombDecoder);
              }
            }
          }
        }
      }

      expGolombDecoder.skipUnsignedExpGolomb(); // log2_max_frame_num_minus4

      picOrderCntType = expGolombDecoder.readUnsignedExpGolomb();

      if (picOrderCntType === 0) {
        expGolombDecoder.readUnsignedExpGolomb(); // log2_max_pic_order_cnt_lsb_minus4
      } else if (picOrderCntType === 1) {
        expGolombDecoder.skipBits(1); // delta_pic_order_always_zero_flag

        expGolombDecoder.skipExpGolomb(); // offset_for_non_ref_pic

        expGolombDecoder.skipExpGolomb(); // offset_for_top_to_bottom_field

        numRefFramesInPicOrderCntCycle = expGolombDecoder.readUnsignedExpGolomb();

        for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
          expGolombDecoder.skipExpGolomb(); // offset_for_ref_frame[ i ]
        }
      }

      expGolombDecoder.skipUnsignedExpGolomb(); // max_num_ref_frames

      expGolombDecoder.skipBits(1); // gaps_in_frame_num_value_allowed_flag

      picWidthInMbsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
      picHeightInMapUnitsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
      frameMbsOnlyFlag = expGolombDecoder.readBits(1);

      if (frameMbsOnlyFlag === 0) {
        expGolombDecoder.skipBits(1); // mb_adaptive_frame_field_flag
      }

      expGolombDecoder.skipBits(1); // direct_8x8_inference_flag

      if (expGolombDecoder.readBoolean()) {
        // frame_cropping_flag
        frameCropLeftOffset = expGolombDecoder.readUnsignedExpGolomb();
        frameCropRightOffset = expGolombDecoder.readUnsignedExpGolomb();
        frameCropTopOffset = expGolombDecoder.readUnsignedExpGolomb();
        frameCropBottomOffset = expGolombDecoder.readUnsignedExpGolomb();
      }

      if (expGolombDecoder.readBoolean()) {
        // vui_parameters_present_flag
        if (expGolombDecoder.readBoolean()) {
          // aspect_ratio_info_present_flag
          aspectRatioIdc = expGolombDecoder.readUnsignedByte();

          // map aspect_ratio_idc to a sample aspect ratio (Table E-1 of the
          // H.264 spec); 255 (Extended_SAR) carries an explicit 16-bit pair
          switch (aspectRatioIdc) {
            case 1:
              sarRatio = [1, 1];
              break;

            case 2:
              sarRatio = [12, 11];
              break;

            case 3:
              sarRatio = [10, 11];
              break;

            case 4:
              sarRatio = [16, 11];
              break;

            case 5:
              sarRatio = [40, 33];
              break;

            case 6:
              sarRatio = [24, 11];
              break;

            case 7:
              sarRatio = [20, 11];
              break;

            case 8:
              sarRatio = [32, 11];
              break;

            case 9:
              sarRatio = [80, 33];
              break;

            case 10:
              sarRatio = [18, 11];
              break;

            case 11:
              sarRatio = [15, 11];
              break;

            case 12:
              sarRatio = [64, 33];
              break;

            case 13:
              sarRatio = [160, 99];
              break;

            case 14:
              sarRatio = [4, 3];
              break;

            case 15:
              sarRatio = [3, 2];
              break;

            case 16:
              sarRatio = [2, 1];
              break;

            case 255:
              {
                sarRatio = [expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte(), expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte()];
                break;
              }
          }

          if (sarRatio) {
            // NOTE(review): the result of this expression is discarded; it
            // appears to be a leftover from code that once computed a
            // display aspect ratio here
            sarRatio[0] / sarRatio[1];
          }
        }
      }

      return {
        profileIdc: profileIdc,
        levelIdc: levelIdc,
        profileCompatibility: profileCompatibility,
        width: (picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2,
        height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - frameCropTopOffset * 2 - frameCropBottomOffset * 2,
        // sar is sample aspect ratio
        sarRatio: sarRatio
      };
    };
  };
15090
  _H264Stream.prototype = new stream();
  // public exports of the inlined mux.js h264 module: the elementary-stream
  // parser and the lower-level Annex B NAL unit splitter
  var h264 = {
    H264Stream: _H264Stream,
    NalByteStream: _NalByteStream
  };
15096 /**
15097 * mux.js
15098 *
15099 * Copyright (c) Brightcove
15100 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
15101 *
15102 * Utilities to detect basic properties and metadata about Aac data.
15103 */
15104
  // sampling frequencies in Hz, indexed by the 4-bit
  // sampling_frequency_index field of an ADTS header
  var ADTS_SAMPLING_FREQUENCIES = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
15106
15107 var parseId3TagSize = function parseId3TagSize(header, byteIndex) {
15108 var returnSize = header[byteIndex + 6] << 21 | header[byteIndex + 7] << 14 | header[byteIndex + 8] << 7 | header[byteIndex + 9],
15109 flags = header[byteIndex + 5],
15110 footerPresent = (flags & 16) >> 4; // if we get a negative returnSize clamp it to 0
15111
15112 returnSize = returnSize >= 0 ? returnSize : 0;
15113
15114 if (footerPresent) {
15115 return returnSize + 20;
15116 }
15117
15118 return returnSize + 10;
15119 };
15120
15121 var getId3Offset = function getId3Offset(data, offset) {
15122 if (data.length - offset < 10 || data[offset] !== 'I'.charCodeAt(0) || data[offset + 1] !== 'D'.charCodeAt(0) || data[offset + 2] !== '3'.charCodeAt(0)) {
15123 return offset;
15124 }
15125
15126 offset += parseId3TagSize(data, offset);
15127 return getId3Offset(data, offset);
15128 }; // TODO: use vhs-utils
15129
15130
15131 var isLikelyAacData$1 = function isLikelyAacData(data) {
15132 var offset = getId3Offset(data, 0);
15133 return data.length >= offset + 2 && (data[offset] & 0xFF) === 0xFF && (data[offset + 1] & 0xF0) === 0xF0 && // verify that the 2 layer bits are 0, aka this
15134 // is not mp3 data but aac data.
15135 (data[offset + 1] & 0x16) === 0x10;
15136 };
15137
15138 var parseSyncSafeInteger = function parseSyncSafeInteger(data) {
15139 return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
15140 }; // return a percent-encoded representation of the specified byte range
15141 // @see http://en.wikipedia.org/wiki/Percent-encoding
15142
15143
15144 var percentEncode = function percentEncode(bytes, start, end) {
15145 var i,
15146 result = '';
15147
15148 for (i = start; i < end; i++) {
15149 result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
15150 }
15151
15152 return result;
15153 }; // return the string representation of the specified byte range,
15154 // interpreted as ISO-8859-1.
15155
15156
  // return the string representation of the byte range [start, end),
  // interpreted as ISO-8859-1: each byte maps directly to the code point of
  // the same value, which percent-encoding followed by unescape() produces
  // (unescape is deprecated but still a standard global)
  var parseIso88591 = function parseIso88591(bytes, start, end) {
    return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
  };
15160
15161 var parseAdtsSize = function parseAdtsSize(header, byteIndex) {
15162 var lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
15163 middle = header[byteIndex + 4] << 3,
15164 highTwo = header[byteIndex + 3] & 0x3 << 11;
15165 return highTwo | middle | lowThree;
15166 };
15167
15168 var parseType$2 = function parseType(header, byteIndex) {
15169 if (header[byteIndex] === 'I'.charCodeAt(0) && header[byteIndex + 1] === 'D'.charCodeAt(0) && header[byteIndex + 2] === '3'.charCodeAt(0)) {
15170 return 'timed-metadata';
15171 } else if (header[byteIndex] & 0xff === 0xff && (header[byteIndex + 1] & 0xf0) === 0xf0) {
15172 return 'audio';
15173 }
15174
15175 return null;
15176 };
15177
15178 var parseSampleRate = function parseSampleRate(packet) {
15179 var i = 0;
15180
15181 while (i + 5 < packet.length) {
15182 if (packet[i] !== 0xFF || (packet[i + 1] & 0xF6) !== 0xF0) {
15183 // If a valid header was not found, jump one forward and attempt to
15184 // find a valid ADTS header starting at the next byte
15185 i++;
15186 continue;
15187 }
15188
15189 return ADTS_SAMPLING_FREQUENCIES[(packet[i + 2] & 0x3c) >>> 2];
15190 }
15191
15192 return null;
15193 };
15194
  /**
   * Extract the Apple transport-stream timestamp from the PRIV frame of an
   * ID3v2 tag, if one is present.
   * @param {Uint8Array} packet bytes beginning with an ID3v2 tag header
   * @return {Number|null} the 33-bit MPEG-2 timestamp, or null when no
   * 'com.apple.streaming.transportStreamTimestamp' PRIV frame is found
   */
  var parseAacTimestamp = function parseAacTimestamp(packet) {
    var frameStart, frameSize, frame, frameHeader; // find the start of the first frame and the end of the tag

    frameStart = 10;

    // bit 6 of the tag flags byte indicates an extended header is present
    if (packet[5] & 0x40) {
      // advance the frame start past the extended header
      frameStart += 4; // header size field

      frameStart += parseSyncSafeInteger(packet.subarray(10, 14));
    } // parse one or more ID3 frames
    // http://id3.org/id3v2.3.0#ID3v2_frame_overview


    do {
      // determine the number of bytes in this frame
      frameSize = parseSyncSafeInteger(packet.subarray(frameStart + 4, frameStart + 8));

      if (frameSize < 1) {
        return null;
      }

      frameHeader = String.fromCharCode(packet[frameStart], packet[frameStart + 1], packet[frameStart + 2], packet[frameStart + 3]);

      if (frameHeader === 'PRIV') {
        frame = packet.subarray(frameStart + 10, frameStart + frameSize + 10);

        // a PRIV payload is a NUL-terminated owner string followed by the
        // private data
        for (var i = 0; i < frame.byteLength; i++) {
          if (frame[i] === 0) {
            var owner = parseIso88591(frame, 0, i);

            if (owner === 'com.apple.streaming.transportStreamTimestamp') {
              // assemble the top 31 bits with shifts, then bring in the
              // remaining 2 bits via multiply/add to avoid 32-bit overflow
              var d = frame.subarray(i + 1);
              var size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
              size *= 4;
              size += d[7] & 0x03;
              return size;
            }

            break;
          }
        }
      }

      frameStart += 10; // advance past the frame header

      frameStart += frameSize; // advance past the frame body
    } while (frameStart < packet.byteLength);

    return null;
  };
15246
  // AAC/ID3 parsing helpers shared by the AAC stream splitter and probes
  var utils = {
    isLikelyAacData: isLikelyAacData$1,
    parseId3TagSize: parseId3TagSize,
    parseAdtsSize: parseAdtsSize,
    parseType: parseType$2,
    parseSampleRate: parseSampleRate,
    parseAacTimestamp: parseAacTimestamp
  };
15255
15256 var _AacStream;
15257 /**
15258 * Splits an incoming stream of binary data into ADTS and ID3 Frames.
15259 */
15260
15261
15262 _AacStream = function AacStream() {
15263 var everything = new Uint8Array(),
15264 timeStamp = 0;
15265
15266 _AacStream.prototype.init.call(this);
15267
15268 this.setTimestamp = function (timestamp) {
15269 timeStamp = timestamp;
15270 };
15271
15272 this.push = function (bytes) {
15273 var frameSize = 0,
15274 byteIndex = 0,
15275 bytesLeft,
15276 chunk,
15277 packet,
15278 tempLength; // If there are bytes remaining from the last segment, prepend them to the
15279 // bytes that were pushed in
15280
15281 if (everything.length) {
15282 tempLength = everything.length;
15283 everything = new Uint8Array(bytes.byteLength + tempLength);
15284 everything.set(everything.subarray(0, tempLength));
15285 everything.set(bytes, tempLength);
15286 } else {
15287 everything = bytes;
15288 }
15289
15290 while (everything.length - byteIndex >= 3) {
15291 if (everything[byteIndex] === 'I'.charCodeAt(0) && everything[byteIndex + 1] === 'D'.charCodeAt(0) && everything[byteIndex + 2] === '3'.charCodeAt(0)) {
15292 // Exit early because we don't have enough to parse
15293 // the ID3 tag header
15294 if (everything.length - byteIndex < 10) {
15295 break;
15296 } // check framesize
15297
15298
15299 frameSize = utils.parseId3TagSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
15300 // to emit a full packet
15301 // Add to byteIndex to support multiple ID3 tags in sequence
15302
15303 if (byteIndex + frameSize > everything.length) {
15304 break;
15305 }
15306
15307 chunk = {
15308 type: 'timed-metadata',
15309 data: everything.subarray(byteIndex, byteIndex + frameSize)
15310 };
15311 this.trigger('data', chunk);
15312 byteIndex += frameSize;
15313 continue;
15314 } else if ((everything[byteIndex] & 0xff) === 0xff && (everything[byteIndex + 1] & 0xf0) === 0xf0) {
15315 // Exit early because we don't have enough to parse
15316 // the ADTS frame header
15317 if (everything.length - byteIndex < 7) {
15318 break;
15319 }
15320
15321 frameSize = utils.parseAdtsSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
15322 // to emit a full packet
15323
15324 if (byteIndex + frameSize > everything.length) {
15325 break;
15326 }
15327
15328 packet = {
15329 type: 'audio',
15330 data: everything.subarray(byteIndex, byteIndex + frameSize),
15331 pts: timeStamp,
15332 dts: timeStamp
15333 };
15334 this.trigger('data', packet);
15335 byteIndex += frameSize;
15336 continue;
15337 }
15338
15339 byteIndex++;
15340 }
15341
15342 bytesLeft = everything.length - byteIndex;
15343
15344 if (bytesLeft > 0) {
15345 everything = everything.subarray(byteIndex);
15346 } else {
15347 everything = new Uint8Array();
15348 }
15349 };
15350
15351 this.reset = function () {
15352 everything = new Uint8Array();
15353 this.trigger('reset');
15354 };
15355
15356 this.endTimeline = function () {
15357 everything = new Uint8Array();
15358 this.trigger('endedtimeline');
15359 };
15360 };
15361
  _AacStream.prototype = new stream();
  var aac = _AacStream; // constants

  // property names copied from parsed frame/config data onto track objects
  var AUDIO_PROPERTIES = ['audioobjecttype', 'channelcount', 'samplerate', 'samplingfrequencyindex', 'samplesize'];
  var audioProperties = AUDIO_PROPERTIES;
  var VIDEO_PROPERTIES = ['width', 'height', 'profileIdc', 'levelIdc', 'profileCompatibility', 'sarRatio'];
  var videoProperties = VIDEO_PROPERTIES;
  var H264Stream = h264.H264Stream;
  var isLikelyAacData = utils.isLikelyAacData;
  var ONE_SECOND_IN_TS$1 = clock.ONE_SECOND_IN_TS; // object types

  var _VideoSegmentStream, _AudioSegmentStream, _Transmuxer, _CoalesceStream;
15374
  /**
   * Re-emit a pipeline stream's 'log' event from the transmuxer (bound as
   * `this` by the caller), tagging the event with the originating stream key.
   * @param {String} key name of the pipeline stream the event came from
   * @param {Object} event the log event to annotate and re-trigger
   */
  var retriggerForStream = function retriggerForStream(key, event) {
    event.stream = key;
    this.trigger('log', event);
  };
15379
15380 var addPipelineLogRetriggers = function addPipelineLogRetriggers(transmuxer, pipeline) {
15381 var keys = Object.keys(pipeline);
15382
15383 for (var i = 0; i < keys.length; i++) {
15384 var key = keys[i]; // skip non-stream keys and headOfPipeline
15385 // which is just a duplicate
15386
15387 if (key === 'headOfPipeline' || !pipeline[key].on) {
15388 continue;
15389 }
15390
15391 pipeline[key].on('log', retriggerForStream.bind(transmuxer, key));
15392 }
15393 };
15394 /**
15395 * Compare two arrays (even typed) for same-ness
15396 */
15397
15398
15399 var arrayEquals = function arrayEquals(a, b) {
15400 var i;
15401
15402 if (a.length !== b.length) {
15403 return false;
15404 } // compare the value of each element in the array
15405
15406
15407 for (i = 0; i < a.length; i++) {
15408 if (a[i] !== b[i]) {
15409 return false;
15410 }
15411 }
15412
15413 return true;
15414 };
15415
15416 var generateSegmentTimingInfo = function generateSegmentTimingInfo(baseMediaDecodeTime, startDts, startPts, endDts, endPts, prependedContentDuration) {
15417 var ptsOffsetFromDts = startPts - startDts,
15418 decodeDuration = endDts - startDts,
15419 presentationDuration = endPts - startPts; // The PTS and DTS values are based on the actual stream times from the segment,
15420 // however, the player time values will reflect a start from the baseMediaDecodeTime.
15421 // In order to provide relevant values for the player times, base timing info on the
15422 // baseMediaDecodeTime and the DTS and PTS durations of the segment.
15423
15424 return {
15425 start: {
15426 dts: baseMediaDecodeTime,
15427 pts: baseMediaDecodeTime + ptsOffsetFromDts
15428 },
15429 end: {
15430 dts: baseMediaDecodeTime + decodeDuration,
15431 pts: baseMediaDecodeTime + presentationDuration
15432 },
15433 prependedContentDuration: prependedContentDuration,
15434 baseMediaDecodeTime: baseMediaDecodeTime
15435 };
15436 };
15437 /**
15438 * Constructs a single-track, ISO BMFF media segment from AAC data
15439 * events. The output of this stream can be fed to a SourceBuffer
15440 * configured with a suitable initialization segment.
15441 * @param track {object} track metadata configuration
15442 * @param options {object} transmuxer options object
15443 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
15444 * in the source; false to adjust the first segment to start at 0.
15445 */
15446
15447
  /**
   * Constructs a single-track, ISO BMFF media segment from AAC data
   * events. The output of this stream can be fed to a SourceBuffer
   * configured with a suitable initialization segment.
   * @param track {object} track metadata configuration
   * @param options {object} transmuxer options object
   * @param options.keepOriginalTimestamps {boolean} If true, keep the
   * timestamps in the source; false to adjust the first segment to start at 0.
   */
  _AudioSegmentStream = function AudioSegmentStream(track, options) {
    var adtsFrames = [],
        sequenceNumber,
        earliestAllowedDts = 0,
        audioAppendStartTs = 0,
        videoBaseMediaDecodeTime = Infinity;
    options = options || {};
    sequenceNumber = options.firstSequenceNumber || 0;

    _AudioSegmentStream.prototype.init.call(this);

    // buffer an incoming ADTS frame and mirror its audio properties onto
    // the track object
    this.push = function (data) {
      trackDecodeInfo.collectDtsInfo(track, data);

      if (track) {
        audioProperties.forEach(function (prop) {
          track[prop] = data[prop];
        });
      } // buffer audio data until end() is called


      adtsFrames.push(data);
    };

    // frames with a DTS earlier than this will be trimmed during flush
    this.setEarliestDts = function (earliestDts) {
      earliestAllowedDts = earliestDts;
    };

    this.setVideoBaseMediaDecodeTime = function (baseMediaDecodeTime) {
      videoBaseMediaDecodeTime = baseMediaDecodeTime;
    };

    this.setAudioAppendStart = function (timestamp) {
      audioAppendStartTs = timestamp;
    };

    // assemble the buffered frames into a moof+mdat pair and emit it
    this.flush = function () {
      var frames, moof, mdat, boxes, frameDuration, segmentDuration, videoClockCyclesOfSilencePrefixed; // return early if no audio data has been observed

      if (adtsFrames.length === 0) {
        this.trigger('done', 'AudioSegmentStream');
        return;
      }

      frames = audioFrameUtils.trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts);
      track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps); // amount of audio filled but the value is in video clock rather than audio clock

      videoClockCyclesOfSilencePrefixed = audioFrameUtils.prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime); // we have to build the index from byte locations to
      // samples (that is, adts frames) in the audio data

      track.samples = audioFrameUtils.generateSampleTable(frames); // concatenate the audio data to constuct the mdat

      mdat = mp4Generator.mdat(audioFrameUtils.concatenateFrameData(frames));
      adtsFrames = [];
      moof = mp4Generator.moof(sequenceNumber, [track]);
      boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // bump the sequence number for next time

      sequenceNumber++;
      boxes.set(moof);
      boxes.set(mdat, moof.byteLength);
      trackDecodeInfo.clearDtsInfo(track);
      frameDuration = Math.ceil(ONE_SECOND_IN_TS$1 * 1024 / track.samplerate); // TODO this check was added to maintain backwards compatibility (particularly with
      // tests) on adding the timingInfo event. However, it seems unlikely that there's a
      // valid use-case where an init segment/data should be triggered without associated
      // frames. Leaving for now, but should be looked into.

      if (frames.length) {
        segmentDuration = frames.length * frameDuration;
        this.trigger('segmentTimingInfo', generateSegmentTimingInfo( // The audio track's baseMediaDecodeTime is in audio clock cycles, but the
        // frame info is in video clock cycles. Convert to match expectation of
        // listeners (that all timestamps will be based on video clock cycles).
        clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate), // frame times are already in video clock, as is segment duration
        frames[0].dts, frames[0].pts, frames[0].dts + segmentDuration, frames[0].pts + segmentDuration, videoClockCyclesOfSilencePrefixed || 0));
        this.trigger('timingInfo', {
          start: frames[0].pts,
          end: frames[0].pts + segmentDuration
        });
      }

      this.trigger('data', {
        track: track,
        boxes: boxes
      });
      this.trigger('done', 'AudioSegmentStream');
    };

    // drop buffered frames and clear per-track decode timing state
    this.reset = function () {
      trackDecodeInfo.clearDtsInfo(track);
      adtsFrames = [];
      this.trigger('reset');
    };
  };
15540
  // AudioSegmentStream inherits the event facilities of the base stream
  _AudioSegmentStream.prototype = new stream();
/**
 * Constructs a single-track, ISO BMFF media segment from H264 data
 * events. The output of this stream can be fed to a SourceBuffer
 * configured with a suitable initialization segment.
 * @param track {object} track metadata configuration
 * @param options {object} transmuxer options object
 * @param options.alignGopsAtEnd {boolean} If true, start from the end of the
 * gopsToAlignWith list when attempting to align gop pts
 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
 * in the source; false to adjust the first segment to start at 0.
 */

_VideoSegmentStream = function VideoSegmentStream(track, options) {
  var sequenceNumber,
      nalUnits = [],          // nal units buffered between flushes
      gopsToAlignWith = [],   // reference gops supplied via alignGopsWith()
      config,                 // most recent SPS-derived codec config
      pps;                    // most recent picture parameter set bytes
  options = options || {};
  sequenceNumber = options.firstSequenceNumber || 0;

  _VideoSegmentStream.prototype.init.call(this);

  // minPTS is recomputed per segment from the buffered nal units
  delete track.minPTS;
  this.gopCache_ = [];
  /**
   * Constructs a ISO BMFF segment given H264 nalUnits
   * @param {Object} nalUnit A data event representing a nalUnit
   * @param {String} nalUnit.nalUnitType
   * @param {Object} nalUnit.config Properties for a mp4 track
   * @param {Uint8Array} nalUnit.data The nalUnit bytes
   * @see lib/codecs/h264.js
   **/

  this.push = function (nalUnit) {
    trackDecodeInfo.collectDtsInfo(track, nalUnit); // record the track config

    // capture the first SPS seen since the last reset; it provides the
    // codec configuration copied onto the track (width, height, profile, ...)
    if (nalUnit.nalUnitType === 'seq_parameter_set_rbsp' && !config) {
      config = nalUnit.config;
      track.sps = [nalUnit.data];
      videoProperties.forEach(function (prop) {
        track[prop] = config[prop];
      }, this);
    }

    // likewise capture the first PPS seen since the last reset
    if (nalUnit.nalUnitType === 'pic_parameter_set_rbsp' && !pps) {
      pps = nalUnit.data;
      track.pps = [nalUnit.data];
    } // buffer video until flush() is called


    nalUnits.push(nalUnit);
  };
  /**
   * Pass constructed ISO BMFF track and boxes on to the
   * next stream in the pipeline
   **/


  this.flush = function () {
    var frames,
        gopForFusion,
        gops,
        moof,
        mdat,
        boxes,
        prependedContentDuration = 0,
        firstGop,
        lastGop; // Throw away nalUnits at the start of the byte stream until
    // we find the first AUD

    while (nalUnits.length) {
      if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
        break;
      }

      nalUnits.shift();
    } // Return early if no video data has been observed


    if (nalUnits.length === 0) {
      this.resetStream_();
      this.trigger('done', 'VideoSegmentStream');
      return;
    } // Organize the raw nal-units into arrays that represent
    // higher-level constructs such as frames and gops
    // (group-of-pictures)


    frames = frameUtils.groupNalsIntoFrames(nalUnits);
    gops = frameUtils.groupFramesIntoGops(frames); // If the first frame of this fragment is not a keyframe we have
    // a problem since MSE (on Chrome) requires a leading keyframe.
    //
    // We have two approaches to repairing this situation:
    // 1) GOP-FUSION:
    //    This is where we keep track of the GOPS (group-of-pictures)
    //    from previous fragments and attempt to find one that we can
    //    prepend to the current fragment in order to create a valid
    //    fragment.
    // 2) KEYFRAME-PULLING:
    //    Here we search for the first keyframe in the fragment and
    //    throw away all the frames between the start of the fragment
    //    and that keyframe. We then extend the duration and pull the
    //    PTS of the keyframe forward so that it covers the time range
    //    of the frames that were disposed of.
    //
    // #1 is far prefereable over #2 which can cause "stuttering" but
    // requires more things to be just right.

    if (!gops[0][0].keyFrame) {
      // Search for a gop for fusion from our gopCache
      // NOTE(review): getGopForFusion_ only declares one parameter; the
      // second argument (track) is ignored — confirm against upstream mux.js
      gopForFusion = this.getGopForFusion_(nalUnits[0], track);

      if (gopForFusion) {
        // in order to provide more accurate timing information about the segment, save
        // the number of seconds prepended to the original segment due to GOP fusion
        prependedContentDuration = gopForFusion.duration;
        gops.unshift(gopForFusion); // Adjust Gops' metadata to account for the inclusion of the
        // new gop at the beginning
        // (gops is an Array decorated with aggregate byteLength/nalCount/
        // pts/dts/duration expando properties)

        gops.byteLength += gopForFusion.byteLength;
        gops.nalCount += gopForFusion.nalCount;
        gops.pts = gopForFusion.pts;
        gops.dts = gopForFusion.dts;
        gops.duration += gopForFusion.duration;
      } else {
        // If we didn't find a candidate gop fall back to keyframe-pulling
        gops = frameUtils.extendFirstKeyFrame(gops);
      }
    } // Trim gops to align with gopsToAlignWith


    if (gopsToAlignWith.length) {
      var alignedGops;

      if (options.alignGopsAtEnd) {
        alignedGops = this.alignGopsAtEnd_(gops);
      } else {
        alignedGops = this.alignGopsAtStart_(gops);
      }

      if (!alignedGops) {
        // save all the nals in the last GOP into the gop cache
        this.gopCache_.unshift({
          gop: gops.pop(),
          pps: track.pps,
          sps: track.sps
        }); // Keep a maximum of 6 GOPs in the cache

        this.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnits

        nalUnits = []; // return early no gops can be aligned with desired gopsToAlignWith

        this.resetStream_();
        this.trigger('done', 'VideoSegmentStream');
        return;
      } // Some gops were trimmed. clear dts info so minSegmentDts and pts are correct
      // when recalculated before sending off to CoalesceStream


      trackDecodeInfo.clearDtsInfo(track);
      gops = alignedGops;
    }

    trackDecodeInfo.collectDtsInfo(track, gops); // First, we have to build the index from byte locations to
    // samples (that is, frames) in the video data

    track.samples = frameUtils.generateSampleTable(gops); // Concatenate the video data and construct the mdat

    mdat = mp4Generator.mdat(frameUtils.concatenateNalData(gops));
    track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps);
    this.trigger('processedGopsInfo', gops.map(function (gop) {
      return {
        pts: gop.pts,
        dts: gop.dts,
        byteLength: gop.byteLength
      };
    }));
    firstGop = gops[0];
    lastGop = gops[gops.length - 1];
    this.trigger('segmentTimingInfo', generateSegmentTimingInfo(track.baseMediaDecodeTime, firstGop.dts, firstGop.pts, lastGop.dts + lastGop.duration, lastGop.pts + lastGop.duration, prependedContentDuration));
    this.trigger('timingInfo', {
      start: gops[0].pts,
      end: gops[gops.length - 1].pts + gops[gops.length - 1].duration
    }); // save all the nals in the last GOP into the gop cache

    this.gopCache_.unshift({
      gop: gops.pop(),
      pps: track.pps,
      sps: track.sps
    }); // Keep a maximum of 6 GOPs in the cache

    this.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnits

    nalUnits = [];
    this.trigger('baseMediaDecodeTime', track.baseMediaDecodeTime);
    this.trigger('timelineStartInfo', track.timelineStartInfo);
    moof = mp4Generator.moof(sequenceNumber, [track]); // it would be great to allocate this array up front instead of
    // throwing away hundreds of media segment fragments

    boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // Bump the sequence number for next time

    sequenceNumber++;
    boxes.set(moof);
    boxes.set(mdat, moof.byteLength);
    this.trigger('data', {
      track: track,
      boxes: boxes
    });
    this.resetStream_(); // Continue with the flush process now

    this.trigger('done', 'VideoSegmentStream');
  };

  // Full reset: also drops the gop cache and alignment reference,
  // unlike resetStream_ which only clears per-segment decode state
  this.reset = function () {
    this.resetStream_();
    nalUnits = [];
    this.gopCache_.length = 0;
    gopsToAlignWith.length = 0;
    this.trigger('reset');
  };

  this.resetStream_ = function () {
    trackDecodeInfo.clearDtsInfo(track); // reset config and pps because they may differ across segments
    // for instance, when we are rendition switching

    config = undefined;
    pps = undefined;
  }; // Search for a candidate Gop for gop-fusion from the gop cache and
  // return it or return null if no good candidate was found


  this.getGopForFusion_ = function (nalUnit) {
    var halfSecond = 45000,
        // Half-a-second in a 90khz clock
        allowableOverlap = 10000,
        // About 3 frames @ 30fps
        nearestDistance = Infinity,
        dtsDistance,
        nearestGopObj,
        currentGop,
        currentGopObj,
        i; // Search for the GOP nearest to the beginning of this nal unit

    for (i = 0; i < this.gopCache_.length; i++) {
      currentGopObj = this.gopCache_[i];
      currentGop = currentGopObj.gop; // Reject Gops with different SPS or PPS

      if (!(track.pps && arrayEquals(track.pps[0], currentGopObj.pps[0])) || !(track.sps && arrayEquals(track.sps[0], currentGopObj.sps[0]))) {
        continue;
      } // Reject Gops that would require a negative baseMediaDecodeTime


      if (currentGop.dts < track.timelineStartInfo.dts) {
        continue;
      } // The distance between the end of the gop and the start of the nalUnit


      dtsDistance = nalUnit.dts - currentGop.dts - currentGop.duration; // Only consider GOPS that start before the nal unit and end within
      // a half-second of the nal unit

      if (dtsDistance >= -allowableOverlap && dtsDistance <= halfSecond) {
        // Always use the closest GOP we found if there is more than
        // one candidate
        if (!nearestGopObj || nearestDistance > dtsDistance) {
          nearestGopObj = currentGopObj;
          nearestDistance = dtsDistance;
        }
      }
    }

    if (nearestGopObj) {
      return nearestGopObj.gop;
    }

    return null;
  }; // trim gop list to the first gop found that has a matching pts with a gop in the list
  // of gopsToAlignWith starting from the START of the list


  this.alignGopsAtStart_ = function (gops) {
    var alignIndex, gopIndex, align, gop, byteLength, nalCount, duration, alignedGops;
    byteLength = gops.byteLength;
    nalCount = gops.nalCount;
    duration = gops.duration;
    alignIndex = gopIndex = 0;

    while (alignIndex < gopsToAlignWith.length && gopIndex < gops.length) {
      align = gopsToAlignWith[alignIndex];
      gop = gops[gopIndex];

      if (align.pts === gop.pts) {
        break;
      }

      if (gop.pts > align.pts) {
        // this current gop starts after the current gop we want to align on, so increment
        // align index
        alignIndex++;
        continue;
      } // current gop starts before the current gop we want to align on. so increment gop
      // index


      gopIndex++;
      byteLength -= gop.byteLength;
      nalCount -= gop.nalCount;
      duration -= gop.duration;
    }

    if (gopIndex === 0) {
      // no gops to trim
      return gops;
    }

    if (gopIndex === gops.length) {
      // all gops trimmed, skip appending all gops
      return null;
    }

    // returned array carries the recomputed aggregate expando properties
    alignedGops = gops.slice(gopIndex);
    alignedGops.byteLength = byteLength;
    alignedGops.duration = duration;
    alignedGops.nalCount = nalCount;
    alignedGops.pts = alignedGops[0].pts;
    alignedGops.dts = alignedGops[0].dts;
    return alignedGops;
  }; // trim gop list to the first gop found that has a matching pts with a gop in the list
  // of gopsToAlignWith starting from the END of the list


  this.alignGopsAtEnd_ = function (gops) {
    var alignIndex, gopIndex, align, gop, alignEndIndex, matchFound;
    alignIndex = gopsToAlignWith.length - 1;
    gopIndex = gops.length - 1;
    alignEndIndex = null;
    matchFound = false;

    while (alignIndex >= 0 && gopIndex >= 0) {
      align = gopsToAlignWith[alignIndex];
      gop = gops[gopIndex];

      if (align.pts === gop.pts) {
        matchFound = true;
        break;
      }

      if (align.pts > gop.pts) {
        alignIndex--;
        continue;
      }

      if (alignIndex === gopsToAlignWith.length - 1) {
        // gop.pts is greater than the last alignment candidate. If no match is found
        // by the end of this loop, we still want to append gops that come after this
        // point
        alignEndIndex = gopIndex;
      }

      gopIndex--;
    }

    if (!matchFound && alignEndIndex === null) {
      return null;
    }

    var trimIndex;

    if (matchFound) {
      trimIndex = gopIndex;
    } else {
      trimIndex = alignEndIndex;
    }

    if (trimIndex === 0) {
      return gops;
    }

    var alignedGops = gops.slice(trimIndex);
    // recompute the aggregate expando properties for the trimmed array
    var metadata = alignedGops.reduce(function (total, gop) {
      total.byteLength += gop.byteLength;
      total.duration += gop.duration;
      total.nalCount += gop.nalCount;
      return total;
    }, {
      byteLength: 0,
      duration: 0,
      nalCount: 0
    });
    alignedGops.byteLength = metadata.byteLength;
    alignedGops.duration = metadata.duration;
    alignedGops.nalCount = metadata.nalCount;
    alignedGops.pts = alignedGops[0].pts;
    alignedGops.dts = alignedGops[0].dts;
    return alignedGops;
  };

  // Provide the reference gop list used by alignGopsAtStart_/alignGopsAtEnd_
  this.alignGopsWith = function (newGopsToAlignWith) {
    gopsToAlignWith = newGopsToAlignWith;
  };
};
15943
// VideoSegmentStream inherits the event/pipe interface (on/trigger/pipe) from Stream
_VideoSegmentStream.prototype = new stream();
/**
 * A Stream that can combine multiple streams (ie. audio & video)
 * into a single output segment for MSE. Also supports audio-only
 * and video-only streams.
 * @param options {object} transmuxer options object
 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
 * in the source; false to adjust the first segment to start at media timeline start.
 */

_CoalesceStream = function CoalesceStream(options, metadataStream) {
  // Number of tracks expected per output segment. When greater than 1,
  // multiple tracks are combined into a single segment.
  this.numberOfTracks = 0;
  this.metadataStream = metadataStream;
  options = options || {};
  // Remux audio and video into a single MP4 unless explicitly disabled.
  this.remuxTracks = typeof options.remux !== 'undefined' ? !!options.remux : true;
  // Only honor keepOriginalTimestamps when it is an actual boolean.
  this.keepOriginalTimestamps = typeof options.keepOriginalTimestamps === 'boolean' ? options.keepOriginalTimestamps : false;
  // Per-segment accumulation state; drained by flush().
  this.pendingTracks = [];
  this.videoTrack = null;
  this.pendingBoxes = [];
  this.pendingCaptions = [];
  this.pendingMetadata = [];
  this.pendingBytes = 0;
  this.emittedTracks = 0;

  _CoalesceStream.prototype.init.call(this); // Take output from multiple

  /**
   * Buffer incoming captions, id3 tags and finished track boxes until
   * the segment is flushed.
   */
  this.push = function (output) {
    // buffer incoming captions until the associated video segment finishes
    if (output.text) {
      return this.pendingCaptions.push(output);
    }

    // buffer incoming id3 tags until the final flush
    if (output.frames) {
      return this.pendingMetadata.push(output);
    }

    // Otherwise this is a completed track: record it along with the
    // information required to construct the final segment.
    this.pendingTracks.push(output.track);
    this.pendingBytes += output.boxes.byteLength;

    // TODO: is there an issue for this against chrome?
    // We unshift audio and push video because as of Chrome 75, when
    // switching from one init segment to another, if the video mdat does
    // not appear after the audio mdat only audio will play for the
    // duration of our transmux.
    if (output.track.type === 'video') {
      this.videoTrack = output.track;
      this.pendingBoxes.push(output.boxes);
    }

    if (output.track.type === 'audio') {
      this.audioTrack = output.track;
      this.pendingBoxes.unshift(output.boxes);
    }
  };
};
16019
// CoalesceStream inherits the event/pipe interface (on/trigger/pipe) from Stream
_CoalesceStream.prototype = new stream();
16021
/**
 * Assemble the buffered tracks, captions and id3 tags into a single
 * output segment and emit it as a 'data' event, followed by 'caption',
 * 'id3Frame' and (once all tracks have been emitted) 'done' events.
 * @param {String} flushSource name of the upstream stream that triggered
 * this flush (e.g. 'VideoSegmentStream' or 'AudioSegmentStream')
 */
_CoalesceStream.prototype.flush = function (flushSource) {
  var offset = 0,
      event = {
        captions: [],
        captionStreams: {},
        metadata: [],
        info: {}
      },
      caption,
      id3,
      initSegment,
      timelineStartPts = 0,
      i;

  if (this.pendingTracks.length < this.numberOfTracks) {
    if (flushSource !== 'VideoSegmentStream' && flushSource !== 'AudioSegmentStream') {
      // Return because we haven't received a flush from a data-generating
      // portion of the segment (meaning that we have only recieved meta-data
      // or captions.)
      return;
    } else if (this.remuxTracks) {
      // Return until we have enough tracks from the pipeline to remux (if we
      // are remuxing audio and video into a single MP4)
      return;
    } else if (this.pendingTracks.length === 0) {
      // In the case where we receive a flush without any data having been
      // received we consider it an emitted track for the purposes of coalescing
      // `done` events.
      // We do this for the case where there is an audio and video track in the
      // segment but no audio data. (seen in several playlists with alternate
      // audio tracks and no audio present in the main TS segments.)
      this.emittedTracks++;

      if (this.emittedTracks >= this.numberOfTracks) {
        this.trigger('done');
        this.emittedTracks = 0;
      }

      return;
    }
  }

  // Prefer the video track's timeline and properties when both exist
  if (this.videoTrack) {
    timelineStartPts = this.videoTrack.timelineStartInfo.pts;
    videoProperties.forEach(function (prop) {
      event.info[prop] = this.videoTrack[prop];
    }, this);
  } else if (this.audioTrack) {
    timelineStartPts = this.audioTrack.timelineStartInfo.pts;
    audioProperties.forEach(function (prop) {
      event.info[prop] = this.audioTrack[prop];
    }, this);
  }

  if (this.videoTrack || this.audioTrack) {
    if (this.pendingTracks.length === 1) {
      event.type = this.pendingTracks[0].type;
    } else {
      event.type = 'combined';
    }

    this.emittedTracks += this.pendingTracks.length;
    initSegment = mp4Generator.initSegment(this.pendingTracks); // Create a new typed array to hold the init segment

    event.initSegment = new Uint8Array(initSegment.byteLength); // Create an init segment containing a moov
    // and track definitions

    event.initSegment.set(initSegment); // Create a new typed array to hold the moof+mdats

    event.data = new Uint8Array(this.pendingBytes); // Append each moof+mdat (one per track) together

    for (i = 0; i < this.pendingBoxes.length; i++) {
      event.data.set(this.pendingBoxes[i], offset);
      offset += this.pendingBoxes[i].byteLength;
    } // Translate caption PTS times into second offsets to match the
    // video timeline for the segment, and add track info


    for (i = 0; i < this.pendingCaptions.length; i++) {
      caption = this.pendingCaptions[i];
      caption.startTime = clock.metadataTsToSeconds(caption.startPts, timelineStartPts, this.keepOriginalTimestamps);
      caption.endTime = clock.metadataTsToSeconds(caption.endPts, timelineStartPts, this.keepOriginalTimestamps);
      event.captionStreams[caption.stream] = true;
      event.captions.push(caption);
    } // Translate ID3 frame PTS times into second offsets to match the
    // video timeline for the segment


    for (i = 0; i < this.pendingMetadata.length; i++) {
      id3 = this.pendingMetadata[i];
      id3.cueTime = clock.metadataTsToSeconds(id3.pts, timelineStartPts, this.keepOriginalTimestamps);
      event.metadata.push(id3);
    } // We add this to every single emitted segment even though we only need
    // it for the first


    event.metadata.dispatchType = this.metadataStream.dispatchType; // Reset stream state
    // NOTE(review): this.audioTrack is intentionally(?) not reset here,
    // unlike videoTrack — confirm against upstream mux.js

    this.pendingTracks.length = 0;
    this.videoTrack = null;
    this.pendingBoxes.length = 0;
    this.pendingCaptions.length = 0;
    this.pendingBytes = 0;
    this.pendingMetadata.length = 0; // Emit the built segment
    // We include captions and ID3 tags for backwards compatibility,
    // ideally we should send only video and audio in the data event

    this.trigger('data', event); // Emit each caption to the outside world
    // Ideally, this would happen immediately on parsing captions,
    // but we need to ensure that video data is sent back first
    // so that caption timing can be adjusted to match video timing

    for (i = 0; i < event.captions.length; i++) {
      caption = event.captions[i];
      this.trigger('caption', caption);
    } // Emit each id3 tag to the outside world
    // Ideally, this would happen immediately on parsing the tag,
    // but we need to ensure that video data is sent back first
    // so that ID3 frame timing can be adjusted to match video timing


    for (i = 0; i < event.metadata.length; i++) {
      id3 = event.metadata[i];
      this.trigger('id3Frame', id3);
    }
  } // Only emit `done` if all tracks have been flushed and emitted


  if (this.emittedTracks >= this.numberOfTracks) {
    this.trigger('done');
    this.emittedTracks = 0;
  }
};
16155
// Enable or disable remuxing audio and video into a single output segment
_CoalesceStream.prototype.setRemux = function (val) {
  this.remuxTracks = val;
};
/**
 * A Stream that expects MP2T binary data as input and produces
 * corresponding media segments, suitable for use with Media Source
 * Extension (MSE) implementations that support the ISO BMFF byte
 * stream format, like Chrome.
 * @param options {object} transmuxer options object
 * @param options.baseMediaDecodeTime {number} initial baseMediaDecodeTime
 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
 * in the source; false to adjust the first segment to start at 0.
 */


_Transmuxer = function Transmuxer(options) {
  var self = this,
      hasFlushed = true,   // true until the first push after a flush; gates pipeline (re)selection
      videoTrack,
      audioTrack;

  _Transmuxer.prototype.init.call(this);

  options = options || {};
  this.baseMediaDecodeTime = options.baseMediaDecodeTime || 0;
  this.transmuxPipeline_ = {};

  // Build the pipeline for raw AAC (ADTS) input
  this.setupAacPipeline = function () {
    var pipeline = {};
    this.transmuxPipeline_ = pipeline;
    pipeline.type = 'aac';
    pipeline.metadataStream = new m2ts_1.MetadataStream(); // set up the parsing pipeline

    pipeline.aacStream = new aac();
    pipeline.audioTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('audio');
    pipeline.timedMetadataTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('timed-metadata');
    pipeline.adtsStream = new adts();
    pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
    pipeline.headOfPipeline = pipeline.aacStream;
    pipeline.aacStream.pipe(pipeline.audioTimestampRolloverStream).pipe(pipeline.adtsStream);
    pipeline.aacStream.pipe(pipeline.timedMetadataTimestampRolloverStream).pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream);
    pipeline.metadataStream.on('timestamp', function (frame) {
      pipeline.aacStream.setTimestamp(frame.timeStamp);
    });
    // lazily create the audio segment stream on the first audio/timed-metadata data event
    pipeline.aacStream.on('data', function (data) {
      if (data.type !== 'timed-metadata' && data.type !== 'audio' || pipeline.audioSegmentStream) {
        return;
      }

      audioTrack = audioTrack || {
        timelineStartInfo: {
          baseMediaDecodeTime: self.baseMediaDecodeTime
        },
        codec: 'adts',
        type: 'audio'
      }; // hook up the audio segment stream to the first track with aac data

      pipeline.coalesceStream.numberOfTracks++;
      pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
      pipeline.audioSegmentStream.on('log', self.getLogTrigger_('audioSegmentStream'));
      pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo')); // Set up the final part of the audio pipeline

      pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream); // emit pmt info

      self.trigger('trackinfo', {
        hasAudio: !!audioTrack,
        hasVideo: !!videoTrack
      });
    }); // Re-emit any data coming from the coalesce stream to the outside world

    pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data')); // Let the consumer know we have finished flushing the entire pipeline

    pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
    addPipelineLogRetriggers(this, pipeline);
  };

  // Build the pipeline for MPEG2-TS input
  this.setupTsPipeline = function () {
    var pipeline = {};
    this.transmuxPipeline_ = pipeline;
    pipeline.type = 'ts';
    pipeline.metadataStream = new m2ts_1.MetadataStream(); // set up the parsing pipeline

    pipeline.packetStream = new m2ts_1.TransportPacketStream();
    pipeline.parseStream = new m2ts_1.TransportParseStream();
    pipeline.elementaryStream = new m2ts_1.ElementaryStream();
    pipeline.timestampRolloverStream = new m2ts_1.TimestampRolloverStream();
    pipeline.adtsStream = new adts();
    pipeline.h264Stream = new H264Stream();
    pipeline.captionStream = new m2ts_1.CaptionStream(options);
    pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
    pipeline.headOfPipeline = pipeline.packetStream; // disassemble MPEG2-TS packets into elementary streams

    pipeline.packetStream.pipe(pipeline.parseStream).pipe(pipeline.elementaryStream).pipe(pipeline.timestampRolloverStream); // !!THIS ORDER IS IMPORTANT!!
    // demux the streams

    pipeline.timestampRolloverStream.pipe(pipeline.h264Stream);
    pipeline.timestampRolloverStream.pipe(pipeline.adtsStream);
    pipeline.timestampRolloverStream.pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream); // Hook up CEA-608/708 caption stream

    pipeline.h264Stream.pipe(pipeline.captionStream).pipe(pipeline.coalesceStream);
    // lazily create the segment streams once the PMT-derived track metadata arrives
    pipeline.elementaryStream.on('data', function (data) {
      var i;

      if (data.type === 'metadata') {
        i = data.tracks.length; // scan the tracks listed in the metadata

        while (i--) {
          if (!videoTrack && data.tracks[i].type === 'video') {
            videoTrack = data.tracks[i];
            videoTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
          } else if (!audioTrack && data.tracks[i].type === 'audio') {
            audioTrack = data.tracks[i];
            audioTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
          }
        } // hook up the video segment stream to the first track with h264 data


        if (videoTrack && !pipeline.videoSegmentStream) {
          pipeline.coalesceStream.numberOfTracks++;
          pipeline.videoSegmentStream = new _VideoSegmentStream(videoTrack, options);
          pipeline.videoSegmentStream.on('log', self.getLogTrigger_('videoSegmentStream'));
          pipeline.videoSegmentStream.on('timelineStartInfo', function (timelineStartInfo) {
            // When video emits timelineStartInfo data after a flush, we forward that
            // info to the AudioSegmentStream, if it exists, because video timeline
            // data takes precedence.  Do not do this if keepOriginalTimestamps is set,
            // because this is a particularly subtle form of timestamp alteration.
            if (audioTrack && !options.keepOriginalTimestamps) {
              audioTrack.timelineStartInfo = timelineStartInfo; // On the first segment we trim AAC frames that exist before the
              // very earliest DTS we have seen in video because Chrome will
              // interpret any video track with a baseMediaDecodeTime that is
              // non-zero as a gap.

              pipeline.audioSegmentStream.setEarliestDts(timelineStartInfo.dts - self.baseMediaDecodeTime);
            }
          });
          pipeline.videoSegmentStream.on('processedGopsInfo', self.trigger.bind(self, 'gopInfo'));
          pipeline.videoSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'videoSegmentTimingInfo'));
          pipeline.videoSegmentStream.on('baseMediaDecodeTime', function (baseMediaDecodeTime) {
            if (audioTrack) {
              pipeline.audioSegmentStream.setVideoBaseMediaDecodeTime(baseMediaDecodeTime);
            }
          });
          pipeline.videoSegmentStream.on('timingInfo', self.trigger.bind(self, 'videoTimingInfo')); // Set up the final part of the video pipeline

          pipeline.h264Stream.pipe(pipeline.videoSegmentStream).pipe(pipeline.coalesceStream);
        }

        if (audioTrack && !pipeline.audioSegmentStream) {
          // hook up the audio segment stream to the first track with aac data
          pipeline.coalesceStream.numberOfTracks++;
          pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
          pipeline.audioSegmentStream.on('log', self.getLogTrigger_('audioSegmentStream'));
          pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo'));
          pipeline.audioSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'audioSegmentTimingInfo')); // Set up the final part of the audio pipeline

          pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream);
        } // emit pmt info


        self.trigger('trackinfo', {
          hasAudio: !!audioTrack,
          hasVideo: !!videoTrack
        });
      }
    }); // Re-emit any data coming from the coalesce stream to the outside world

    pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
    pipeline.coalesceStream.on('id3Frame', function (id3Frame) {
      id3Frame.dispatchType = pipeline.metadataStream.dispatchType;
      self.trigger('id3Frame', id3Frame);
    });
    pipeline.coalesceStream.on('caption', this.trigger.bind(this, 'caption')); // Let the consumer know we have finished flushing the entire pipeline

    pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
    addPipelineLogRetriggers(this, pipeline);
  }; // hook up the segment streams once track metadata is delivered


  // Reset per-timeline decode state (e.g. across a discontinuity) and
  // optionally rebase timestamps on the new baseMediaDecodeTime
  this.setBaseMediaDecodeTime = function (baseMediaDecodeTime) {
    var pipeline = this.transmuxPipeline_;

    if (!options.keepOriginalTimestamps) {
      this.baseMediaDecodeTime = baseMediaDecodeTime;
    }

    if (audioTrack) {
      audioTrack.timelineStartInfo.dts = undefined;
      audioTrack.timelineStartInfo.pts = undefined;
      trackDecodeInfo.clearDtsInfo(audioTrack);

      if (pipeline.audioTimestampRolloverStream) {
        pipeline.audioTimestampRolloverStream.discontinuity();
      }
    }

    if (videoTrack) {
      if (pipeline.videoSegmentStream) {
        pipeline.videoSegmentStream.gopCache_ = [];
      }

      videoTrack.timelineStartInfo.dts = undefined;
      videoTrack.timelineStartInfo.pts = undefined;
      trackDecodeInfo.clearDtsInfo(videoTrack);
      pipeline.captionStream.reset();
    }

    if (pipeline.timestampRolloverStream) {
      pipeline.timestampRolloverStream.discontinuity();
    }
  };

  // NOTE(review): assumes audioSegmentStream has been created whenever
  // audioTrack is set — confirm this holds for all pipeline states
  this.setAudioAppendStart = function (timestamp) {
    if (audioTrack) {
      this.transmuxPipeline_.audioSegmentStream.setAudioAppendStart(timestamp);
    }
  };

  // Toggle remuxing on both the stored options and the live pipeline
  this.setRemux = function (val) {
    var pipeline = this.transmuxPipeline_;
    options.remux = val;

    if (pipeline && pipeline.coalesceStream) {
      pipeline.coalesceStream.setRemux(val);
    }
  };

  // Forward a reference gop list to the video segment stream for alignment
  this.alignGopsWith = function (gopsToAlignWith) {
    if (videoTrack && this.transmuxPipeline_.videoSegmentStream) {
      this.transmuxPipeline_.videoSegmentStream.alignGopsWith(gopsToAlignWith);
    }
  };

  // Returns a handler that tags 'log' events with the originating stream name
  // before re-emitting them from the transmuxer
  this.getLogTrigger_ = function (key) {
    var self = this;
    return function (event) {
      event.stream = key;
      self.trigger('log', event);
    };
  }; // feed incoming data to the front of the parsing pipeline


  this.push = function (data) {
    // after a flush, sniff the container format and (re)build the pipeline
    if (hasFlushed) {
      var isAac = isLikelyAacData(data);

      if (isAac && this.transmuxPipeline_.type !== 'aac') {
        this.setupAacPipeline();
      } else if (!isAac && this.transmuxPipeline_.type !== 'ts') {
        this.setupTsPipeline();
      }

      hasFlushed = false;
    }

    this.transmuxPipeline_.headOfPipeline.push(data);
  }; // flush any buffered data


  this.flush = function () {
    hasFlushed = true; // Start at the top of the pipeline and flush all pending work

    this.transmuxPipeline_.headOfPipeline.flush();
  };

  this.endTimeline = function () {
    this.transmuxPipeline_.headOfPipeline.endTimeline();
  };

  this.reset = function () {
    if (this.transmuxPipeline_.headOfPipeline) {
      this.transmuxPipeline_.headOfPipeline.reset();
    }
  }; // Caption data has to be reset when seeking outside buffered range


  this.resetCaptions = function () {
    if (this.transmuxPipeline_.captionStream) {
      this.transmuxPipeline_.captionStream.reset();
    }
  };
};
16433
  // _Transmuxer emits events, so it inherits from the base Stream class
  _Transmuxer.prototype = new stream();
  // public interface of the mux.js transmuxer module
  var transmuxer = {
    Transmuxer: _Transmuxer,
    VideoSegmentStream: _VideoSegmentStream,
    AudioSegmentStream: _AudioSegmentStream,
    AUDIO_PROPERTIES: audioProperties,
    VIDEO_PROPERTIES: videoProperties,
    // exported for testing
    generateSegmentTimingInfo: generateSegmentTimingInfo
  };
16444 /**
16445 * mux.js
16446 *
16447 * Copyright (c) Brightcove
16448 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
16449 */
16450
16451 var toUnsigned$3 = function toUnsigned(value) {
16452 return value >>> 0;
16453 };
16454
16455 var toHexString$1 = function toHexString(value) {
16456 return ('00' + value.toString(16)).slice(-2);
16457 };
16458
16459 var bin = {
16460 toUnsigned: toUnsigned$3,
16461 toHexString: toHexString$1
16462 };
16463
16464 var parseType$1 = function parseType(buffer) {
16465 var result = '';
16466 result += String.fromCharCode(buffer[0]);
16467 result += String.fromCharCode(buffer[1]);
16468 result += String.fromCharCode(buffer[2]);
16469 result += String.fromCharCode(buffer[3]);
16470 return result;
16471 };
16472
16473 var parseType_1 = parseType$1;
16474 var toUnsigned$2 = bin.toUnsigned;
16475
16476 var findBox = function findBox(data, path) {
16477 var results = [],
16478 i,
16479 size,
16480 type,
16481 end,
16482 subresults;
16483
16484 if (!path.length) {
16485 // short-circuit the search for empty paths
16486 return null;
16487 }
16488
16489 for (i = 0; i < data.byteLength;) {
16490 size = toUnsigned$2(data[i] << 24 | data[i + 1] << 16 | data[i + 2] << 8 | data[i + 3]);
16491 type = parseType_1(data.subarray(i + 4, i + 8));
16492 end = size > 1 ? i + size : data.byteLength;
16493
16494 if (type === path[0]) {
16495 if (path.length === 1) {
16496 // this is the end of the path and we've found the box we were
16497 // looking for
16498 results.push(data.subarray(i + 8, end));
16499 } else {
16500 // recursively search for the next box along the path
16501 subresults = findBox(data.subarray(i + 8, end), path.slice(1));
16502
16503 if (subresults.length) {
16504 results = results.concat(subresults);
16505 }
16506 }
16507 }
16508
16509 i = end;
16510 } // we've finished searching all of data
16511
16512
16513 return results;
16514 };
16515
16516 var findBox_1 = findBox;
16517 var toUnsigned$1 = bin.toUnsigned;
16518 var getUint64$1 = numbers.getUint64;
16519
16520 var tfdt = function tfdt(data) {
16521 var result = {
16522 version: data[0],
16523 flags: new Uint8Array(data.subarray(1, 4))
16524 };
16525
16526 if (result.version === 1) {
16527 result.baseMediaDecodeTime = getUint64$1(data.subarray(4));
16528 } else {
16529 result.baseMediaDecodeTime = toUnsigned$1(data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7]);
16530 }
16531
16532 return result;
16533 };
16534
16535 var parseTfdt = tfdt;
16536
16537 var parseSampleFlags = function parseSampleFlags(flags) {
16538 return {
16539 isLeading: (flags[0] & 0x0c) >>> 2,
16540 dependsOn: flags[0] & 0x03,
16541 isDependedOn: (flags[1] & 0xc0) >>> 6,
16542 hasRedundancy: (flags[1] & 0x30) >>> 4,
16543 paddingValue: (flags[1] & 0x0e) >>> 1,
16544 isNonSyncSample: flags[1] & 0x01,
16545 degradationPriority: flags[2] << 8 | flags[3]
16546 };
16547 };
16548
16549 var parseSampleFlags_1 = parseSampleFlags;
16550
  /**
   * Parse a Track Fragment Run (trun) box payload into its version, flags
   * and a list of per-sample records. Each optional per-sample field
   * (duration, size, flags, compositionTimeOffset) is only read when the
   * corresponding presence bit is set in the trun flags.
   *
   * @param {Uint8Array} data - the trun payload, box header excluded
   * @return {Object} { version, flags, samples, [dataOffset] }
   * @see ISO-BMFF-12/2015, Section 8.8.8
   */
  var trun = function trun(data) {
    var result = {
      version: data[0],
      flags: new Uint8Array(data.subarray(1, 4)),
      samples: []
    },
        view = new DataView(data.buffer, data.byteOffset, data.byteLength),
        // Flag interpretation
        dataOffsetPresent = result.flags[2] & 0x01,
        // compare with 2nd byte of 0x1
        firstSampleFlagsPresent = result.flags[2] & 0x04,
        // compare with 2nd byte of 0x4
        sampleDurationPresent = result.flags[1] & 0x01,
        // compare with 2nd byte of 0x100
        sampleSizePresent = result.flags[1] & 0x02,
        // compare with 2nd byte of 0x200
        sampleFlagsPresent = result.flags[1] & 0x04,
        // compare with 2nd byte of 0x400
        sampleCompositionTimeOffsetPresent = result.flags[1] & 0x08,
        // compare with 2nd byte of 0x800
        sampleCount = view.getUint32(4),
        offset = 8,
        sample;

    if (dataOffsetPresent) {
      // 32 bit signed integer
      result.dataOffset = view.getInt32(offset);
      offset += 4;
    } // Overrides the flags for the first sample only. The order of
    // optional values will be: duration, size, compositionTimeOffset


    if (firstSampleFlagsPresent && sampleCount) {
      sample = {
        flags: parseSampleFlags_1(data.subarray(offset, offset + 4))
      };
      offset += 4;

      if (sampleDurationPresent) {
        sample.duration = view.getUint32(offset);
        offset += 4;
      }

      if (sampleSizePresent) {
        sample.size = view.getUint32(offset);
        offset += 4;
      }

      if (sampleCompositionTimeOffsetPresent) {
        // version 1 trun boxes store the composition time offset as a
        // signed integer; version 0 stores it unsigned
        if (result.version === 1) {
          sample.compositionTimeOffset = view.getInt32(offset);
        } else {
          sample.compositionTimeOffset = view.getUint32(offset);
        }

        offset += 4;
      }

      result.samples.push(sample);
      sampleCount--;
    }

    // the remaining samples all use the flags (if any) embedded in the
    // trun itself rather than the first-sample override
    while (sampleCount--) {
      sample = {};

      if (sampleDurationPresent) {
        sample.duration = view.getUint32(offset);
        offset += 4;
      }

      if (sampleSizePresent) {
        sample.size = view.getUint32(offset);
        offset += 4;
      }

      if (sampleFlagsPresent) {
        sample.flags = parseSampleFlags_1(data.subarray(offset, offset + 4));
        offset += 4;
      }

      if (sampleCompositionTimeOffsetPresent) {
        if (result.version === 1) {
          sample.compositionTimeOffset = view.getInt32(offset);
        } else {
          sample.compositionTimeOffset = view.getUint32(offset);
        }

        offset += 4;
      }

      result.samples.push(sample);
    }

    return result;
  };

  var parseTrun = trun;
16648
16649 var tfhd = function tfhd(data) {
16650 var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
16651 result = {
16652 version: data[0],
16653 flags: new Uint8Array(data.subarray(1, 4)),
16654 trackId: view.getUint32(4)
16655 },
16656 baseDataOffsetPresent = result.flags[2] & 0x01,
16657 sampleDescriptionIndexPresent = result.flags[2] & 0x02,
16658 defaultSampleDurationPresent = result.flags[2] & 0x08,
16659 defaultSampleSizePresent = result.flags[2] & 0x10,
16660 defaultSampleFlagsPresent = result.flags[2] & 0x20,
16661 durationIsEmpty = result.flags[0] & 0x010000,
16662 defaultBaseIsMoof = result.flags[0] & 0x020000,
16663 i;
16664 i = 8;
16665
16666 if (baseDataOffsetPresent) {
16667 i += 4; // truncate top 4 bytes
16668 // FIXME: should we read the full 64 bits?
16669
16670 result.baseDataOffset = view.getUint32(12);
16671 i += 4;
16672 }
16673
16674 if (sampleDescriptionIndexPresent) {
16675 result.sampleDescriptionIndex = view.getUint32(i);
16676 i += 4;
16677 }
16678
16679 if (defaultSampleDurationPresent) {
16680 result.defaultSampleDuration = view.getUint32(i);
16681 i += 4;
16682 }
16683
16684 if (defaultSampleSizePresent) {
16685 result.defaultSampleSize = view.getUint32(i);
16686 i += 4;
16687 }
16688
16689 if (defaultSampleFlagsPresent) {
16690 result.defaultSampleFlags = view.getUint32(i);
16691 }
16692
16693 if (durationIsEmpty) {
16694 result.durationIsEmpty = true;
16695 }
16696
16697 if (!baseDataOffsetPresent && defaultBaseIsMoof) {
16698 result.baseDataOffsetIsMoof = true;
16699 }
16700
16701 return result;
16702 };
16703
16704 var parseTfhd = tfhd;
  // Resolve a reference to the global object across environments
  // (browser window, web worker self, Node global, or a bare object).
  var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
  var win;

  if (typeof window !== "undefined") {
    win = window;
  } else if (typeof commonjsGlobal !== "undefined") {
    // commonjsGlobal is always defined above, so this branch is the
    // effective fallback outside browsers
    win = commonjsGlobal;
  } else if (typeof self !== "undefined") {
    win = self;
  } else {
    win = {};
  }

  var window_1 = win;
  var discardEmulationPreventionBytes = captionPacketParser.discardEmulationPreventionBytes;
  var CaptionStream = captionStream.CaptionStream;
16721 /**
16722 * Maps an offset in the mdat to a sample based on the the size of the samples.
16723 * Assumes that `parseSamples` has been called first.
16724 *
16725 * @param {Number} offset - The offset into the mdat
16726 * @param {Object[]} samples - An array of samples, parsed using `parseSamples`
16727 * @return {?Object} The matching sample, or null if no match was found.
16728 *
16729 * @see ISO-BMFF-12/2015, Section 8.8.8
16730 **/
16731
16732 var mapToSample = function mapToSample(offset, samples) {
16733 var approximateOffset = offset;
16734
16735 for (var i = 0; i < samples.length; i++) {
16736 var sample = samples[i];
16737
16738 if (approximateOffset < sample.size) {
16739 return sample;
16740 }
16741
16742 approximateOffset -= sample.size;
16743 }
16744
16745 return null;
16746 };
  /**
   * Finds SEI nal units contained in a Media Data Box.
   * Assumes that `parseSamples` has been called first.
   *
   * @param {Uint8Array} avcStream - The bytes of the mdat
   * @param {Object[]} samples - The samples parsed out by `parseSamples`
   * @param {Number} trackId - The trackId of this video track
   * @return {Object[]} seiNals - the parsed SEI NALUs found.
   *   The contents of the seiNal should match what is expected by
   *   CaptionStream.push (nalUnitType, size, data, escapedRBSP, pts, dts)
   *
   * @see ISO-BMFF-12/2015, Section 8.1.1
   * @see Rec. ITU-T H.264, 7.3.2.3.1
   **/


  var findSeiNals = function findSeiNals(avcStream, samples, trackId) {
    var avcView = new DataView(avcStream.buffer, avcStream.byteOffset, avcStream.byteLength),
        result = {
      logs: [],
      seiNals: []
    },
        seiNal,
        i,
        length,
        lastMatchedSample;

    // the mdat is a sequence of length-prefixed NAL units; each iteration
    // advances past the current unit (i += length in the update clause)
    for (i = 0; i + 4 < avcStream.length; i += length) {
      length = avcView.getUint32(i);
      i += 4; // Bail if this doesn't appear to be an H264 stream

      if (length <= 0) {
        continue;
      }

      // low 5 bits of the first NAL byte are the nal_unit_type;
      // 0x06 is sei_rbsp
      switch (avcStream[i] & 0x1F) {
        case 0x06:
          var data = avcStream.subarray(i + 1, i + 1 + length);
          var matchingSample = mapToSample(i, samples);
          seiNal = {
            nalUnitType: 'sei_rbsp',
            size: length,
            data: data,
            escapedRBSP: discardEmulationPreventionBytes(data),
            trackId: trackId
          };

          if (matchingSample) {
            seiNal.pts = matchingSample.pts;
            seiNal.dts = matchingSample.dts;
            lastMatchedSample = matchingSample;
          } else if (lastMatchedSample) {
            // If a matching sample cannot be found, use the last
            // sample's values as they should be as close as possible
            seiNal.pts = lastMatchedSample.pts;
            seiNal.dts = lastMatchedSample.dts;
          } else {
            result.logs.push({
              level: 'warn',
              message: 'We\'ve encountered a nal unit without data at ' + i + ' for trackId ' + trackId + '. See mux.js#223.'
            });
            // note: this break exits the switch, skipping the push below,
            // so a NAL with no timing info is logged but not emitted
            break;
          }

          result.seiNals.push(seiNal);
          break;
      }
    }

    return result;
  };
16818 /**
16819 * Parses sample information out of Track Run Boxes and calculates
16820 * the absolute presentation and decode timestamps of each sample.
16821 *
16822 * @param {Array<Uint8Array>} truns - The Trun Run boxes to be parsed
16823 * @param {Number|BigInt} baseMediaDecodeTime - base media decode time from tfdt
16824 @see ISO-BMFF-12/2015, Section 8.8.12
16825 * @param {Object} tfhd - The parsed Track Fragment Header
16826 * @see inspect.parseTfhd
16827 * @return {Object[]} the parsed samples
16828 *
16829 * @see ISO-BMFF-12/2015, Section 8.8.8
16830 **/
16831
16832
16833 var parseSamples = function parseSamples(truns, baseMediaDecodeTime, tfhd) {
16834 var currentDts = baseMediaDecodeTime;
16835 var defaultSampleDuration = tfhd.defaultSampleDuration || 0;
16836 var defaultSampleSize = tfhd.defaultSampleSize || 0;
16837 var trackId = tfhd.trackId;
16838 var allSamples = [];
16839 truns.forEach(function (trun) {
16840 // Note: We currently do not parse the sample table as well
16841 // as the trun. It's possible some sources will require this.
16842 // moov > trak > mdia > minf > stbl
16843 var trackRun = parseTrun(trun);
16844 var samples = trackRun.samples;
16845 samples.forEach(function (sample) {
16846 if (sample.duration === undefined) {
16847 sample.duration = defaultSampleDuration;
16848 }
16849
16850 if (sample.size === undefined) {
16851 sample.size = defaultSampleSize;
16852 }
16853
16854 sample.trackId = trackId;
16855 sample.dts = currentDts;
16856
16857 if (sample.compositionTimeOffset === undefined) {
16858 sample.compositionTimeOffset = 0;
16859 }
16860
16861 if (typeof currentDts === 'bigint') {
16862 sample.pts = currentDts + window_1.BigInt(sample.compositionTimeOffset);
16863 currentDts += window_1.BigInt(sample.duration);
16864 } else {
16865 sample.pts = currentDts + sample.compositionTimeOffset;
16866 currentDts += sample.duration;
16867 }
16868 });
16869 allSamples = allSamples.concat(samples);
16870 });
16871 return allSamples;
16872 };
16873 /**
16874 * Parses out caption nals from an FMP4 segment's video tracks.
16875 *
16876 * @param {Uint8Array} segment - The bytes of a single segment
16877 * @param {Number} videoTrackId - The trackId of a video track in the segment
16878 * @return {Object.<Number, Object[]>} A mapping of video trackId to
16879 * a list of seiNals found in that track
16880 **/
16881
16882
16883 var parseCaptionNals = function parseCaptionNals(segment, videoTrackId) {
16884 // To get the samples
16885 var trafs = findBox_1(segment, ['moof', 'traf']); // To get SEI NAL units
16886
16887 var mdats = findBox_1(segment, ['mdat']);
16888 var captionNals = {};
16889 var mdatTrafPairs = []; // Pair up each traf with a mdat as moofs and mdats are in pairs
16890
16891 mdats.forEach(function (mdat, index) {
16892 var matchingTraf = trafs[index];
16893 mdatTrafPairs.push({
16894 mdat: mdat,
16895 traf: matchingTraf
16896 });
16897 });
16898 mdatTrafPairs.forEach(function (pair) {
16899 var mdat = pair.mdat;
16900 var traf = pair.traf;
16901 var tfhd = findBox_1(traf, ['tfhd']); // Exactly 1 tfhd per traf
16902
16903 var headerInfo = parseTfhd(tfhd[0]);
16904 var trackId = headerInfo.trackId;
16905 var tfdt = findBox_1(traf, ['tfdt']); // Either 0 or 1 tfdt per traf
16906
16907 var baseMediaDecodeTime = tfdt.length > 0 ? parseTfdt(tfdt[0]).baseMediaDecodeTime : 0;
16908 var truns = findBox_1(traf, ['trun']);
16909 var samples;
16910 var result; // Only parse video data for the chosen video track
16911
16912 if (videoTrackId === trackId && truns.length > 0) {
16913 samples = parseSamples(truns, baseMediaDecodeTime, headerInfo);
16914 result = findSeiNals(mdat, samples, trackId);
16915
16916 if (!captionNals[trackId]) {
16917 captionNals[trackId] = {
16918 seiNals: [],
16919 logs: []
16920 };
16921 }
16922
16923 captionNals[trackId].seiNals = captionNals[trackId].seiNals.concat(result.seiNals);
16924 captionNals[trackId].logs = captionNals[trackId].logs.concat(result.logs);
16925 }
16926 });
16927 return captionNals;
16928 };
16929 /**
16930 * Parses out inband captions from an MP4 container and returns
16931 * caption objects that can be used by WebVTT and the TextTrack API.
16932 * @see https://developer.mozilla.org/en-US/docs/Web/API/VTTCue
16933 * @see https://developer.mozilla.org/en-US/docs/Web/API/TextTrack
16934 * Assumes that `probe.getVideoTrackIds` and `probe.timescale` have been called first
16935 *
16936 * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
16937 * @param {Number} trackId - The id of the video track to parse
16938 * @param {Number} timescale - The timescale for the video track from the init segment
16939 *
16940 * @return {?Object[]} parsedCaptions - A list of captions or null if no video tracks
16941 * @return {Number} parsedCaptions[].startTime - The time to show the caption in seconds
16942 * @return {Number} parsedCaptions[].endTime - The time to stop showing the caption in seconds
16943 * @return {String} parsedCaptions[].text - The visible content of the caption
16944 **/
16945
16946
16947 var parseEmbeddedCaptions = function parseEmbeddedCaptions(segment, trackId, timescale) {
16948 var captionNals; // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
16949
16950 if (trackId === null) {
16951 return null;
16952 }
16953
16954 captionNals = parseCaptionNals(segment, trackId);
16955 var trackNals = captionNals[trackId] || {};
16956 return {
16957 seiNals: trackNals.seiNals,
16958 logs: trackNals.logs,
16959 timescale: timescale
16960 };
16961 };
16962 /**
16963 * Converts SEI NALUs into captions that can be used by video.js
16964 **/
16965
16966
  var CaptionParser = function CaptionParser() {
    var isInitialized = false;
    var captionStream; // Stores segments seen before trackId and timescale are set

    var segmentCache; // Stores video track ID of the track being parsed

    var trackId; // Stores the timescale of the track being parsed

    var timescale; // Stores captions parsed so far

    var parsedCaptions; // Stores whether we are receiving partial data or not

    var parsingPartial;
    /**
     * A method to indicate whether a CaptionParser has been initialized
     * @returns {Boolean}
     **/

    this.isInitialized = function () {
      return isInitialized;
    };
    /**
     * Initializes the underlying CaptionStream, SEI NAL parsing
     * and management, and caption collection
     **/


    this.init = function (options) {
      captionStream = new CaptionStream();
      isInitialized = true;
      parsingPartial = options ? options.isPartial : false; // Collect dispatched captions

      captionStream.on('data', function (event) {
        // Convert to seconds in the source's timescale
        event.startTime = event.startPts / timescale;
        event.endTime = event.endPts / timescale;
        parsedCaptions.captions.push(event);
        parsedCaptions.captionStreams[event.stream] = true;
      });
      captionStream.on('log', function (log) {
        parsedCaptions.logs.push(log);
      });
    };
    /**
     * Determines if a new video track will be selected
     * or if the timescale changed
     * @return {Boolean}
     **/


    this.isNewInit = function (videoTrackIds, timescales) {
      // an empty track list / timescale map carries no init information,
      // so it never counts as a new init
      if (videoTrackIds && videoTrackIds.length === 0 || timescales && typeof timescales === 'object' && Object.keys(timescales).length === 0) {
        return false;
      }

      return trackId !== videoTrackIds[0] || timescale !== timescales[trackId];
    };
    /**
     * Parses out SEI captions and interacts with underlying
     * CaptionStream to return dispatched captions
     *
     * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
     * @param {Number[]} videoTrackIds - A list of video tracks found in the init segment
     * @param {Object.<Number, Number>} timescales - The timescales found in the init segment
     * @see parseEmbeddedCaptions
     * @see m2ts/caption-stream.js
     **/


    this.parse = function (segment, videoTrackIds, timescales) {
      var parsedData;

      if (!this.isInitialized()) {
        return null; // This is not likely to be a video segment
      } else if (!videoTrackIds || !timescales) {
        return null;
      } else if (this.isNewInit(videoTrackIds, timescales)) {
        // Use the first video track only as there is no
        // mechanism to switch to other video tracks
        trackId = videoTrackIds[0];
        timescale = timescales[trackId]; // If an init segment has not been seen yet, hold onto segment
        // data until we have one.
        // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
      } else if (trackId === null || !timescale) {
        segmentCache.push(segment);
        return null;
      } // Now that a timescale and trackId is set, parse cached segments


      while (segmentCache.length > 0) {
        var cachedSegment = segmentCache.shift();
        this.parse(cachedSegment, videoTrackIds, timescales);
      }

      parsedData = parseEmbeddedCaptions(segment, trackId, timescale);

      if (parsedData && parsedData.logs) {
        parsedCaptions.logs = parsedCaptions.logs.concat(parsedData.logs);
      }

      if (parsedData === null || !parsedData.seiNals) {
        if (parsedCaptions.logs.length) {
          // NOTE(review): captionStreams is [] here but {} elsewhere —
          // looks inconsistent; confirm downstream consumers tolerate both
          return {
            logs: parsedCaptions.logs,
            captions: [],
            captionStreams: []
          };
        }

        return null;
      }

      this.pushNals(parsedData.seiNals); // Force the parsed captions to be dispatched

      this.flushStream();
      return parsedCaptions;
    };
    /**
     * Pushes SEI NALUs onto CaptionStream
     * @param {Object[]} nals - A list of SEI nals parsed using `parseCaptionNals`
     * Assumes that `parseCaptionNals` has been called first
     * @see m2ts/caption-stream.js
     **/


    this.pushNals = function (nals) {
      if (!this.isInitialized() || !nals || nals.length === 0) {
        return null;
      }

      nals.forEach(function (nal) {
        captionStream.push(nal);
      });
    };
    /**
     * Flushes underlying CaptionStream to dispatch processed, displayable captions
     * @see m2ts/caption-stream.js
     **/


    this.flushStream = function () {
      if (!this.isInitialized()) {
        return null;
      }

      if (!parsingPartial) {
        captionStream.flush();
      } else {
        captionStream.partialFlush();
      }
    };
    /**
     * Reset caption buckets for new data
     **/


    this.clearParsedCaptions = function () {
      parsedCaptions.captions = [];
      parsedCaptions.captionStreams = {};
      parsedCaptions.logs = [];
    };
    /**
     * Resets underlying CaptionStream
     * @see m2ts/caption-stream.js
     **/


    this.resetCaptionStream = function () {
      if (!this.isInitialized()) {
        return null;
      }

      captionStream.reset();
    };
    /**
     * Convenience method to clear all captions flushed from the
     * CaptionStream and still being parsed
     * @see m2ts/caption-stream.js
     **/


    this.clearAllCaptions = function () {
      this.clearParsedCaptions();
      this.resetCaptionStream();
    };
    /**
     * Reset caption parser
     **/


    this.reset = function () {
      segmentCache = [];
      trackId = null;
      timescale = null;

      if (!parsedCaptions) {
        parsedCaptions = {
          captions: [],
          // CC1, CC2, CC3, CC4
          captionStreams: {},
          logs: []
        };
      } else {
        this.clearParsedCaptions();
      }

      this.resetCaptionStream();
    };

    // establish initial state on construction
    this.reset();
  };
17178
  var captionParser = CaptionParser;
  var toUnsigned = bin.toUnsigned;
  var toHexString = bin.toHexString;
  var getUint64 = numbers.getUint64;
  // mp4 probe helpers, assigned below
  var timescale, startTime, compositionStartTime, getVideoTrackIds, getTracks, getTimescaleFromMediaHeader;
17184 /**
17185 * Parses an MP4 initialization segment and extracts the timescale
17186 * values for any declared tracks. Timescale values indicate the
17187 * number of clock ticks per second to assume for time-based values
17188 * elsewhere in the MP4.
17189 *
17190 * To determine the start time of an MP4, you need two pieces of
17191 * information: the timescale unit and the earliest base media decode
17192 * time. Multiple timescales can be specified within an MP4 but the
17193 * base media decode time is always expressed in the timescale from
17194 * the media header box for the track:
17195 * ```
17196 * moov > trak > mdia > mdhd.timescale
17197 * ```
17198 * @param init {Uint8Array} the bytes of the init segment
17199 * @return {object} a hash of track ids to timescale values or null if
17200 * the init segment is malformed.
17201 */
17202
17203 timescale = function timescale(init) {
17204 var result = {},
17205 traks = findBox_1(init, ['moov', 'trak']); // mdhd timescale
17206
17207 return traks.reduce(function (result, trak) {
17208 var tkhd, version, index, id, mdhd;
17209 tkhd = findBox_1(trak, ['tkhd'])[0];
17210
17211 if (!tkhd) {
17212 return null;
17213 }
17214
17215 version = tkhd[0];
17216 index = version === 0 ? 12 : 20;
17217 id = toUnsigned(tkhd[index] << 24 | tkhd[index + 1] << 16 | tkhd[index + 2] << 8 | tkhd[index + 3]);
17218 mdhd = findBox_1(trak, ['mdia', 'mdhd'])[0];
17219
17220 if (!mdhd) {
17221 return null;
17222 }
17223
17224 version = mdhd[0];
17225 index = version === 0 ? 12 : 20;
17226 result[id] = toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
17227 return result;
17228 }, result);
17229 };
  /**
   * Determine the base media decode start time, in seconds, for an MP4
   * fragment. If multiple fragments are specified, the earliest time is
   * returned.
   *
   * The base media decode time can be parsed from track fragment
   * metadata:
   * ```
   * moof > traf > tfdt.baseMediaDecodeTime
   * ```
   * It requires the timescale value from the mdhd to interpret.
   *
   * @param timescale {object} a hash of track ids to timescale values.
   * @return {number} the earliest base media decode start time for the
   * fragment, in seconds
   */


  startTime = function startTime(timescale, fragment) {
    var trafs; // we need info from two children of each track fragment box

    trafs = findBox_1(fragment, ['moof', 'traf']); // determine the start times for each track

    var lowestTime = trafs.reduce(function (acc, traf) {
      var tfhd = findBox_1(traf, ['tfhd'])[0]; // get the track id from the tfhd

      var id = toUnsigned(tfhd[4] << 24 | tfhd[5] << 16 | tfhd[6] << 8 | tfhd[7]); // assume a 90kHz clock if no timescale was specified

      var scale = timescale[id] || 90e3; // get the base media decode time from the tfdt

      var tfdt = findBox_1(traf, ['tfdt'])[0];
      var dv = new DataView(tfdt.buffer, tfdt.byteOffset, tfdt.byteLength);
      var baseTime; // version 1 is 64 bit

      if (tfdt[0] === 1) {
        baseTime = getUint64(tfdt.subarray(4, 12));
      } else {
        baseTime = dv.getUint32(4);
      } // convert base time to seconds if it is a valid number.


      var seconds;

      if (typeof baseTime === 'bigint') {
        // NOTE: BigInt division truncates any fractional seconds here
        seconds = baseTime / window_1.BigInt(scale);
      } else if (typeof baseTime === 'number' && !isNaN(baseTime)) {
        seconds = baseTime / scale;
      }

      // downgrade to a plain number once it is safely representable
      if (seconds < Number.MAX_SAFE_INTEGER) {
        seconds = Number(seconds);
      }

      // track the minimum across all track fragments (mixed BigInt/number
      // comparisons are well-defined in JS)
      if (seconds < acc) {
        acc = seconds;
      }

      return acc;
    }, Infinity);
    return typeof lowestTime === 'bigint' || isFinite(lowestTime) ? lowestTime : 0;
  };
  /**
   * Determine the composition start, in seconds, for an MP4
   * fragment.
   *
   * The composition start time of a fragment can be calculated using the base
   * media decode time, composition time offset, and timescale, as follows:
   *
   * compositionStartTime = (baseMediaDecodeTime + compositionTimeOffset) / timescale
   *
   * All of the aforementioned information is contained within a media fragment's
   * `traf` box, except for timescale info, which comes from the initialization
   * segment, so a track id (also contained within a `traf`) is also necessary to
   * associate it with a timescale
   *
   *
   * @param timescales {object} - a hash of track ids to timescale values.
   * @param fragment {Unit8Array} - the bytes of a media segment
   * @return {number} the composition start time for the fragment, in seconds
   **/


  compositionStartTime = function compositionStartTime(timescales, fragment) {
    var trafBoxes = findBox_1(fragment, ['moof', 'traf']);
    var baseMediaDecodeTime = 0;
    var compositionTimeOffset = 0;
    var trackId;

    if (trafBoxes && trafBoxes.length) {
      // The spec states that track run samples contained within a `traf` box are contiguous, but
      // it does not explicitly state whether the `traf` boxes themselves are contiguous.
      // We will assume that they are, so we only need the first to calculate start time.
      var tfhd = findBox_1(trafBoxes[0], ['tfhd'])[0];
      var trun = findBox_1(trafBoxes[0], ['trun'])[0];
      var tfdt = findBox_1(trafBoxes[0], ['tfdt'])[0];

      if (tfhd) {
        var parsedTfhd = parseTfhd(tfhd);
        trackId = parsedTfhd.trackId;
      }

      if (tfdt) {
        var parsedTfdt = parseTfdt(tfdt);
        baseMediaDecodeTime = parsedTfdt.baseMediaDecodeTime;
      }

      if (trun) {
        var parsedTrun = parseTrun(trun);

        // only the first sample's offset matters for the start time
        if (parsedTrun.samples && parsedTrun.samples.length) {
          compositionTimeOffset = parsedTrun.samples[0].compositionTimeOffset || 0;
        }
      }
    } // Get timescale for this specific track. Assume a 90kHz clock if no timescale was
    // specified.


    var timescale = timescales[trackId] || 90e3; // return the composition start time, in seconds

    if (typeof baseMediaDecodeTime === 'bigint') {
      // promote the other operands so the arithmetic below stays in BigInt;
      // NOTE: BigInt division truncates any fractional seconds
      compositionTimeOffset = window_1.BigInt(compositionTimeOffset);
      timescale = window_1.BigInt(timescale);
    }

    var result = (baseMediaDecodeTime + compositionTimeOffset) / timescale;

    // downgrade to a plain number once it is safely representable
    if (typeof result === 'bigint' && result < Number.MAX_SAFE_INTEGER) {
      result = Number(result);
    }

    return result;
  };
17362 /**
17363 * Find the trackIds of the video tracks in this source.
17364 * Found by parsing the Handler Reference and Track Header Boxes:
17365 * moov > trak > mdia > hdlr
17366 * moov > trak > tkhd
17367 *
17368 * @param {Uint8Array} init - The bytes of the init segment for this source
17369 * @return {Number[]} A list of trackIds
17370 *
17371 * @see ISO-BMFF-12/2015, Section 8.4.3
17372 **/
17373
17374
getVideoTrackIds = function getVideoTrackIds(init) {
  var videoTrackIds = [];

  findBox_1(init, ['moov', 'trak']).forEach(function (trak) {
    var tkhds = findBox_1(trak, ['tkhd']);

    findBox_1(trak, ['mdia', 'hdlr']).forEach(function (hdlr, index) {
      // only collect ids for tracks whose handler type is 'vide'
      if (parseType_1(hdlr.subarray(8, 12)) !== 'vide') {
        return;
      }

      var tkhd = tkhds[index];
      var view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
      var version = view.getUint8(0); // the track_ID offset differs between tkhd version 0 and 1 because
      // version 1 stores 64-bit creation/modification times

      videoTrackIds.push(version === 0 ? view.getUint32(12) : view.getUint32(20));
    });
  });
  return videoTrackIds;
};
17398
getTimescaleFromMediaHeader = function getTimescaleFromMediaHeader(mdhd) {
  // mdhd is a FullBox, so byte 0 is the version; version 1 uses 64-bit
  // creation/modification times, pushing the timescale field from offset
  // 12 out to offset 20
  var timescaleOffset = mdhd[0] === 0 ? 12 : 20; // read the 32-bit big-endian timescale and coerce to unsigned

  return toUnsigned(mdhd[timescaleOffset] << 24 | mdhd[timescaleOffset + 1] << 16 | mdhd[timescaleOffset + 2] << 8 | mdhd[timescaleOffset + 3]);
};
17405 /**
17406 * Get all the video, audio, and hint tracks from a non fragmented
17407 * mp4 segment
17408 */
17409
17410
// Build a track descriptor ({id, type, codec, timescale}) for each trak box
// in the init segment. Fields are only set when the corresponding child box
// (tkhd, hdlr, stsd, mdhd) is present.
getTracks = function getTracks(init) {
  var traks = findBox_1(init, ['moov', 'trak']);
  var tracks = [];
  traks.forEach(function (trak) {
    var track = {};
    var tkhd = findBox_1(trak, ['tkhd'])[0];
    var view, tkhdVersion; // id

    if (tkhd) {
      view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
      tkhdVersion = view.getUint8(0); // track_ID lives at a different offset in tkhd version 1 because of
      // its 64-bit creation/modification times

      track.id = tkhdVersion === 0 ? view.getUint32(12) : view.getUint32(20);
    }

    var hdlr = findBox_1(trak, ['mdia', 'hdlr'])[0]; // type

    if (hdlr) {
      var type = parseType_1(hdlr.subarray(8, 12));

      if (type === 'vide') {
        track.type = 'video';
      } else if (type === 'soun') {
        track.type = 'audio';
      } else {
        // pass through any other handler type (e.g. hint) unchanged
        track.type = type;
      }
    } // codec


    var stsd = findBox_1(trak, ['mdia', 'minf', 'stbl', 'stsd'])[0];

    if (stsd) {
      var sampleDescriptions = stsd.subarray(8); // gives the codec type string

      track.codec = parseType_1(sampleDescriptions.subarray(4, 8));
      var codecBox = findBox_1(sampleDescriptions, [track.codec])[0];
      var codecConfig, codecConfigType;

      if (codecBox) {
        // https://tools.ietf.org/html/rfc6381#section-3.3
        if (/^[asm]vc[1-9]$/i.test(track.codec)) {
          // we don't need anything but the "config" parameter of the
          // avc1 codecBox
          codecConfig = codecBox.subarray(78);
          codecConfigType = parseType_1(codecConfig.subarray(4, 8));

          if (codecConfigType === 'avcC' && codecConfig.length > 11) {
            track.codec += '.'; // left padded with zeroes for single digit hex
            // profile idc

            track.codec += toHexString(codecConfig[9]); // the byte containing the constraint_set flags

            track.codec += toHexString(codecConfig[10]); // level idc

            track.codec += toHexString(codecConfig[11]);
          } else {
            // TODO: show a warning that we couldn't parse the codec
            // and are using the default
            track.codec = 'avc1.4d400d';
          }
        } else if (/^mp4[a,v]$/i.test(track.codec)) {
          // NOTE(review): the character class [a,v] also matches a literal
          // comma; presumably [av] was intended — confirm upstream
          // we do not need anything but the streamDescriptor of the mp4a codecBox
          codecConfig = codecBox.subarray(28);
          codecConfigType = parseType_1(codecConfig.subarray(4, 8));

          if (codecConfigType === 'esds' && codecConfig.length > 20 && codecConfig[19] !== 0) {
            track.codec += '.' + toHexString(codecConfig[19]); // this value is only a single digit

            track.codec += '.' + toHexString(codecConfig[20] >>> 2 & 0x3f).replace(/^0/, '');
          } else {
            // TODO: show a warning that we couldn't parse the codec
            // and are using the default
            track.codec = 'mp4a.40.2';
          }
        } else {
          // flac, opus, etc
          track.codec = track.codec.toLowerCase();
        }
      }
    }

    var mdhd = findBox_1(trak, ['mdia', 'mdhd'])[0];

    if (mdhd) {
      track.timescale = getTimescaleFromMediaHeader(mdhd);
    }

    tracks.push(track);
  });
  return tracks;
};
17502
// Public surface of the MP4 probe. timescale, startTime and
// compositionStartTime are defined earlier in this module.
var probe$2 = {
  // export mp4 inspector's findBox and parseType for backwards compatibility
  findBox: findBox_1,
  parseType: parseType_1,
  timescale: timescale,
  startTime: startTime,
  compositionStartTime: compositionStartTime,
  videoTrackIds: getVideoTrackIds,
  tracks: getTracks,
  getTimescaleFromMediaHeader: getTimescaleFromMediaHeader
};
17514
// Extract the 13-bit packet identifier (PID) from a TS packet header:
// the low 5 bits of byte 1 followed by all 8 bits of byte 2.
var parsePid = function parsePid(packet) {
  return (packet[1] & 0x1f) << 8 | packet[2];
};
17521
// True when the payload_unit_start_indicator bit (0x40 of byte 1) is set,
// i.e. this TS packet begins a new PES packet or PSI section.
var parsePayloadUnitStartIndicator = function parsePayloadUnitStartIndicator(packet) {
  return (packet[1] & 0x40) !== 0;
};
17525
// Return the number of bytes to skip past the adaptation field, if one is
// present. adaptation_field_control occupies bits 4-5 of byte 3; values
// 0b10 and 0b11 mean an adaptation field precedes the payload, and its
// length is stored in byte 4 (plus one for the length byte itself). The
// field is used to stuff short PES packets out to a full TS packet and to
// carry timing/control data this code does not consume.
var parseAdaptionField = function parseAdaptionField(packet) {
  var adaptationFieldControl = (packet[3] & 0x30) >>> 4;

  if (adaptationFieldControl > 0x01) {
    return packet[4] + 1;
  }

  return 0;
};
17539
// Classify a TS packet by PID: PID 0 is always the PAT, a packet whose PID
// matches the known PMT PID is the PMT, and anything else is treated as a
// PES packet once the PMT PID is known. Before the PMT PID has been
// discovered, non-PAT packets cannot be classified and null is returned.
var parseType = function parseType(packet, pmtPid) {
  var pid = parsePid(packet);

  if (pid === 0) {
    return 'pat';
  }

  if (pid === pmtPid) {
    return 'pmt';
  }

  return pmtPid ? 'pes' : null;
};
17553
// Extract the program_map_PID from a PAT packet. The 13-bit PID is stored
// across bytes 10 and 11 of the PAT section.
var parsePat = function parsePat(packet) {
  var offset = 4 + parseAdaptionField(packet);

  if (parsePayloadUnitStartIndicator(packet)) {
    // skip the pointer_field byte and the stuffing it points past
    offset += packet[offset] + 1;
  }

  return (packet[offset + 10] & 0x1f) << 8 | packet[offset + 11];
};
17564
// Parse a PMT packet into a map of elementary_PID -> stream_type. Returns
// undefined for "forward" PMT declarations (current_next_indicator === 0),
// which should not yet take effect.
var parsePmt = function parsePmt(packet) {
  var programMapTable = {};
  var pusi = parsePayloadUnitStartIndicator(packet);
  var payloadOffset = 4 + parseAdaptionField(packet);

  if (pusi) {
    // skip the pointer_field byte and whatever it points past
    payloadOffset += packet[payloadOffset] + 1;
  } // PMTs can be sent ahead of the time when they should actually
  // take effect. We don't believe this should ever be the case
  // for HLS but we'll ignore "forward" PMT declarations if we see
  // them. Future PMT declarations have the current_next_indicator
  // set to zero.


  if (!(packet[payloadOffset + 5] & 0x01)) {
    return;
  }

  var sectionLength, tableEnd, programInfoLength; // the mapping table ends at the end of the current section

  sectionLength = (packet[payloadOffset + 1] & 0x0f) << 8 | packet[payloadOffset + 2];
  tableEnd = 3 + sectionLength - 4; // to determine where the table is, we have to figure out how
  // long the program info descriptors are

  programInfoLength = (packet[payloadOffset + 10] & 0x0f) << 8 | packet[payloadOffset + 11]; // advance the offset to the first entry in the mapping table

  var offset = 12 + programInfoLength;

  while (offset < tableEnd) {
    var i = payloadOffset + offset; // add an entry that maps the elementary_pid to the stream_type

    programMapTable[(packet[i + 1] & 0x1F) << 8 | packet[i + 2]] = packet[i]; // move to the next table entry
    // skip past the elementary stream descriptors, if present

    offset += ((packet[i + 3] & 0x0F) << 8 | packet[i + 4]) + 5;
  }

  return programMapTable;
};
17604
// Map the stream_type registered in the program map table for this
// packet's PID to a simple track category. Unregistered or unrecognized
// stream types yield null.
var parsePesType = function parsePesType(packet, programMapTable) {
  var streamType = programMapTable[parsePid(packet)];

  if (streamType === streamTypes.H264_STREAM_TYPE) {
    return 'video';
  }

  if (streamType === streamTypes.ADTS_STREAM_TYPE) {
    return 'audio';
  }

  if (streamType === streamTypes.METADATA_STREAM_TYPE) {
    return 'timed-metadata';
  }

  return null;
};
17623
// Extract the PTS and DTS (in 90kHz clock ticks) from a TS packet carrying
// the start of a PES packet. Returns null when the packet does not start a
// payload unit, is all adaptation-field stuffing, or carries no PTS/DTS.
var parsePesTime = function parsePesTime(packet) {
  var pusi = parsePayloadUnitStartIndicator(packet);

  if (!pusi) {
    return null;
  }

  var offset = 4 + parseAdaptionField(packet);

  if (offset >= packet.byteLength) {
    // From the H 222.0 MPEG-TS spec
    // "For transport stream packets carrying PES packets, stuffing is needed when there
    // is insufficient PES packet data to completely fill the transport stream packet
    // payload bytes. Stuffing is accomplished by defining an adaptation field longer than
    // the sum of the lengths of the data elements in it, so that the payload bytes
    // remaining after the adaptation field exactly accommodates the available PES packet
    // data."
    //
    // If the offset is >= the length of the packet, then the packet contains no data
    // and instead is just adaption field stuffing bytes
    return null;
  }

  var pes = null;
  var ptsDtsFlags; // PES packets may be annotated with a PTS value, or a PTS value
  // and a DTS value. Determine what combination of values is
  // available to work with.

  ptsDtsFlags = packet[offset + 7]; // PTS and DTS are normally stored as a 33-bit number. Javascript
  // performs all bitwise operations on 32-bit integers but javascript
  // supports a much greater range (52-bits) of integer using standard
  // mathematical operations.
  // We construct a 31-bit value using bitwise operators over the 31
  // most significant bits and then multiply by 4 (equal to a left-shift
  // of 2) before we add the final 2 least significant bits of the
  // timestamp (equal to an OR.)

  if (ptsDtsFlags & 0xC0) {
    pes = {}; // the PTS and DTS are not written out directly. For information
    // on how they are encoded, see
    // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html

    pes.pts = (packet[offset + 9] & 0x0E) << 27 | (packet[offset + 10] & 0xFF) << 20 | (packet[offset + 11] & 0xFE) << 12 | (packet[offset + 12] & 0xFF) << 5 | (packet[offset + 13] & 0xFE) >>> 3;
    pes.pts *= 4; // Left shift by 2

    pes.pts += (packet[offset + 13] & 0x06) >>> 1; // OR by the two LSBs

    // default the DTS to the PTS; it is only encoded separately when
    // the 0x40 flag is also set
    pes.dts = pes.pts;

    if (ptsDtsFlags & 0x40) {
      pes.dts = (packet[offset + 14] & 0x0E) << 27 | (packet[offset + 15] & 0xFF) << 20 | (packet[offset + 16] & 0xFE) << 12 | (packet[offset + 17] & 0xFF) << 5 | (packet[offset + 18] & 0xFE) >>> 3;
      pes.dts *= 4; // Left shift by 2

      pes.dts += (packet[offset + 18] & 0x06) >>> 1; // OR by the two LSBs
    }
  }

  return pes;
};
17683
// Translate an H.264 nal_unit_type value into a descriptive name; only the
// five types this inspector cares about are named, everything else is null.
var parseNalUnitType = function parseNalUnitType(type) {
  if (type === 0x05) {
    return 'slice_layer_without_partitioning_rbsp_idr';
  }

  if (type === 0x06) {
    return 'sei_rbsp';
  }

  if (type === 0x07) {
    return 'seq_parameter_set_rbsp';
  }

  if (type === 0x08) {
    return 'pic_parameter_set_rbsp';
  }

  if (type === 0x09) {
    return 'access_unit_delimiter_rbsp';
  }

  return null;
};
17705
// Scan the payload of a reassembled video PES packet for an IDR NAL unit
// (nal_unit_type 5). Walks Annex-B style 00 00 01 start codes; returns true
// as soon as any NAL in the buffer is an IDR slice.
var videoPacketContainsKeyFrame = function videoPacketContainsKeyFrame(packet) {
  var offset = 4 + parseAdaptionField(packet);
  var frameBuffer = packet.subarray(offset);
  var frameI = 0;
  var frameSyncPoint = 0;
  var foundKeyFrame = false;
  var nalType; // advance the sync point to a NAL start, if necessary

  for (; frameSyncPoint < frameBuffer.byteLength - 3; frameSyncPoint++) {
    if (frameBuffer[frameSyncPoint + 2] === 1) {
      // the sync point is properly aligned
      frameI = frameSyncPoint + 5;
      break;
    }
  }

  while (frameI < frameBuffer.byteLength) {
    // look at the current byte to determine if we've hit the end of
    // a NAL unit boundary
    switch (frameBuffer[frameI]) {
      case 0:
        // skip past non-sync sequences
        if (frameBuffer[frameI - 1] !== 0) {
          frameI += 2;
          break;
        } else if (frameBuffer[frameI - 2] !== 0) {
          frameI++;
          break;
        }

        // at least 00 00 00 here: the previous NAL has ended; classify it
        // unless it was empty
        if (frameSyncPoint + 3 !== frameI - 2) {
          nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);

          if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
            foundKeyFrame = true;
          }
        } // drop trailing zeroes


        do {
          frameI++;
        } while (frameBuffer[frameI] !== 1 && frameI < frameBuffer.length);

        frameSyncPoint = frameI - 2;
        frameI += 3;
        break;

      case 1:
        // skip past non-sync sequences
        if (frameBuffer[frameI - 1] !== 0 || frameBuffer[frameI - 2] !== 0) {
          frameI += 3;
          break;
        }

        // found a 00 00 01 start code: classify the NAL that just ended
        nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);

        if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
          foundKeyFrame = true;
        }

        frameSyncPoint = frameI - 2;
        frameI += 3;
        break;

      default:
        // the current byte isn't a one or zero, so it cannot be part
        // of a sync sequence
        frameI += 3;
        break;
    }
  }

  frameBuffer = frameBuffer.subarray(frameSyncPoint);
  frameI -= frameSyncPoint;
  frameSyncPoint = 0; // parse the final nal

  if (frameBuffer && frameBuffer.byteLength > 3) {
    nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);

    if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
      foundKeyFrame = true;
    }
  }

  return foundKeyFrame;
};
17792
// TS probe surface consumed by the segment inspector below (via probe.ts).
var probe$1 = {
  parseType: parseType,
  parsePat: parsePat,
  parsePmt: parsePmt,
  parsePayloadUnitStartIndicator: parsePayloadUnitStartIndicator,
  parsePesType: parsePesType,
  parsePesTime: parsePesTime,
  videoPacketContainsKeyFrame: videoPacketContainsKeyFrame
};
var handleRollover = timestampRolloverStream.handleRollover;
// group the TS and AAC probes under one namespace for the inspector
var probe = {};
probe.ts = probe$1;
probe.aac = utils;
// 90kHz MPEG-TS clock ticks per second
var ONE_SECOND_IN_TS = clock.ONE_SECOND_IN_TS;
var MP2T_PACKET_LENGTH = 188,
    // bytes
    SYNC_BYTE = 0x47;
17810 /**
17811 * walks through segment data looking for pat and pmt packets to parse out
17812 * program map table information
17813 */
17814
/**
 * walks through segment data looking for pat and pmt packets to parse out
 * program map table information
 *
 * @param {Uint8Array} bytes - the TS segment data
 * @param {Object} pmt - accumulator; `pmt.pid` receives the PMT PID and
 *                       `pmt.table` the merged elementary_PID -> stream_type map
 */
var parsePsi_ = function parsePsi_(bytes, pmt) {
  var startIndex = 0,
      endIndex = MP2T_PACKET_LENGTH,
      packet,
      type;

  while (endIndex < bytes.byteLength) {
    // Look for a pair of start and end sync bytes in the data..
    if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
      // We found a packet
      packet = bytes.subarray(startIndex, endIndex);
      type = probe.ts.parseType(packet, pmt.pid);

      switch (type) {
        case 'pat':
          pmt.pid = probe.ts.parsePat(packet);
          break;

        case 'pmt':
          var table = probe.ts.parsePmt(packet); // parsePmt returns undefined for "forward" PMT declarations
          // (current_next_indicator === 0); merging those would throw a
          // TypeError in Object.keys, so skip them

          if (table) {
            pmt.table = pmt.table || {};
            Object.keys(table).forEach(function (key) {
              pmt.table[key] = table[key];
            });
          }

          break;
      }

      startIndex += MP2T_PACKET_LENGTH;
      endIndex += MP2T_PACKET_LENGTH;
      continue;
    } // If we get here, we have somehow become de-synchronized and we need to step
    // forward one byte at a time until we find a pair of sync bytes that denote
    // a packet


    startIndex++;
    endIndex++;
  }
};
17854 /**
17855 * walks through the segment data from the start and end to get timing information
17856 * for the first and last audio pes packets
17857 */
17858
17859
// Collect timing info for the first and last audio PES packets in the
// segment by walking packets forward from the start and backward from the
// end; each parsed timestamp is pushed onto result.audio.
var parseAudioPes_ = function parseAudioPes_(bytes, pmt, result) {
  var startIndex = 0,
      endIndex = MP2T_PACKET_LENGTH,
      packet,
      type,
      pesType,
      pusi,
      parsed;
  var endLoop = false; // Start walking from start of segment to get first audio packet

  while (endIndex <= bytes.byteLength) {
    // Look for a pair of start and end sync bytes in the data..
    // (the end boundary also counts when it is exactly the end of the buffer)
    if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
      // We found a packet
      packet = bytes.subarray(startIndex, endIndex);
      type = probe.ts.parseType(packet, pmt.pid);

      switch (type) {
        case 'pes':
          pesType = probe.ts.parsePesType(packet, pmt.table);
          pusi = probe.ts.parsePayloadUnitStartIndicator(packet);

          if (pesType === 'audio' && pusi) {
            parsed = probe.ts.parsePesTime(packet);

            if (parsed) {
              parsed.type = 'audio';
              result.audio.push(parsed);
              endLoop = true;
            }
          }

          break;
      }

      if (endLoop) {
        break;
      }

      startIndex += MP2T_PACKET_LENGTH;
      endIndex += MP2T_PACKET_LENGTH;
      continue;
    } // If we get here, we have somehow become de-synchronized and we need to step
    // forward one byte at a time until we find a pair of sync bytes that denote
    // a packet


    startIndex++;
    endIndex++;
  } // Start walking from end of segment to get last audio packet


  endIndex = bytes.byteLength;
  startIndex = endIndex - MP2T_PACKET_LENGTH;
  endLoop = false;

  while (startIndex >= 0) {
    // Look for a pair of start and end sync bytes in the data..
    if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
      // We found a packet
      packet = bytes.subarray(startIndex, endIndex);
      type = probe.ts.parseType(packet, pmt.pid);

      switch (type) {
        case 'pes':
          pesType = probe.ts.parsePesType(packet, pmt.table);
          pusi = probe.ts.parsePayloadUnitStartIndicator(packet);

          if (pesType === 'audio' && pusi) {
            parsed = probe.ts.parsePesTime(packet);

            if (parsed) {
              parsed.type = 'audio';
              result.audio.push(parsed);
              endLoop = true;
            }
          }

          break;
      }

      if (endLoop) {
        break;
      }

      startIndex -= MP2T_PACKET_LENGTH;
      endIndex -= MP2T_PACKET_LENGTH;
      continue;
    } // If we get here, we have somehow become de-synchronized and we need to step
    // forward one byte at a time until we find a pair of sync bytes that denote
    // a packet


    startIndex--;
    endIndex--;
  }
};
17957 /**
17958 * walks through the segment data from the start and end to get timing information
17959 * for the first and last video pes packets as well as timing information for the first
17960 * key frame.
17961 */
17962
17963
// Collect timing info for the first and last video PES packets (pushed onto
// result.video) and, while walking forward, reassemble frames to find the
// first key frame's timing (result.firstKeyFrame).
var parseVideoPes_ = function parseVideoPes_(bytes, pmt, result) {
  var startIndex = 0,
      endIndex = MP2T_PACKET_LENGTH,
      packet,
      type,
      pesType,
      pusi,
      parsed,
      frame,
      i,
      pes;
  var endLoop = false;
  // accumulates video TS packets until the next payload-unit start so a
  // whole frame can be scanned for an IDR NAL
  var currentFrame = {
    data: [],
    size: 0
  }; // Start walking from start of segment to get first video packet

  while (endIndex < bytes.byteLength) {
    // Look for a pair of start and end sync bytes in the data..
    if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
      // We found a packet
      packet = bytes.subarray(startIndex, endIndex);
      type = probe.ts.parseType(packet, pmt.pid);

      switch (type) {
        case 'pes':
          pesType = probe.ts.parsePesType(packet, pmt.table);
          pusi = probe.ts.parsePayloadUnitStartIndicator(packet);

          if (pesType === 'video') {
            if (pusi && !endLoop) {
              parsed = probe.ts.parsePesTime(packet);

              if (parsed) {
                parsed.type = 'video';
                result.video.push(parsed);
                endLoop = true;
              }
            }

            if (!result.firstKeyFrame) {
              if (pusi) {
                if (currentFrame.size !== 0) {
                  // a new payload unit starts here, so the accumulated
                  // packets form one complete frame; concatenate them
                  frame = new Uint8Array(currentFrame.size);
                  i = 0;

                  while (currentFrame.data.length) {
                    pes = currentFrame.data.shift();
                    frame.set(pes, i);
                    i += pes.byteLength;
                  }

                  if (probe.ts.videoPacketContainsKeyFrame(frame)) {
                    var firstKeyFrame = probe.ts.parsePesTime(frame); // PTS/DTS may not be available. Simply *not* setting
                    // the keyframe seems to work fine with HLS playback
                    // and definitely preferable to a crash with TypeError...

                    if (firstKeyFrame) {
                      result.firstKeyFrame = firstKeyFrame;
                      result.firstKeyFrame.type = 'video';
                    } else {
                      // eslint-disable-next-line
                      console.warn('Failed to extract PTS/DTS from PES at first keyframe. ' + 'This could be an unusual TS segment, or else mux.js did not ' + 'parse your TS segment correctly. If you know your TS ' + 'segments do contain PTS/DTS on keyframes please file a bug ' + 'report! You can try ffprobe to double check for yourself.');
                    }
                  }

                  currentFrame.size = 0;
                }
              }

              currentFrame.data.push(packet);
              currentFrame.size += packet.byteLength;
            }
          }

          break;
      }

      if (endLoop && result.firstKeyFrame) {
        break;
      }

      startIndex += MP2T_PACKET_LENGTH;
      endIndex += MP2T_PACKET_LENGTH;
      continue;
    } // If we get here, we have somehow become de-synchronized and we need to step
    // forward one byte at a time until we find a pair of sync bytes that denote
    // a packet


    startIndex++;
    endIndex++;
  } // Start walking from end of segment to get last video packet


  endIndex = bytes.byteLength;
  startIndex = endIndex - MP2T_PACKET_LENGTH;
  endLoop = false;

  while (startIndex >= 0) {
    // Look for a pair of start and end sync bytes in the data..
    if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
      // We found a packet
      packet = bytes.subarray(startIndex, endIndex);
      type = probe.ts.parseType(packet, pmt.pid);

      switch (type) {
        case 'pes':
          pesType = probe.ts.parsePesType(packet, pmt.table);
          pusi = probe.ts.parsePayloadUnitStartIndicator(packet);

          if (pesType === 'video' && pusi) {
            parsed = probe.ts.parsePesTime(packet);

            if (parsed) {
              parsed.type = 'video';
              result.video.push(parsed);
              endLoop = true;
            }
          }

          break;
      }

      if (endLoop) {
        break;
      }

      startIndex -= MP2T_PACKET_LENGTH;
      endIndex -= MP2T_PACKET_LENGTH;
      continue;
    } // If we get here, we have somehow become de-synchronized and we need to step
    // forward one byte at a time until we find a pair of sync bytes that denote
    // a packet


    startIndex--;
    endIndex--;
  }
};
18104 /**
18105 * Adjusts the timestamp information for the segment to account for
18106 * rollover and convert to seconds based on pes packet timescale (90khz clock)
18107 */
18108
18109
/**
 * Adjusts the timestamp information for the segment to account for
 * rollover and convert to seconds based on pes packet timescale (90khz clock)
 */
var adjustTimestamp_ = function adjustTimestamp_(segmentInfo, baseTimestamp) {
  // roll each entry's dts/pts over against `reference` and annotate it with
  // dtsTime/ptsTime in seconds
  var adjustEntries = function (entries, reference) {
    entries.forEach(function (info) {
      info.dts = handleRollover(info.dts, reference);
      info.pts = handleRollover(info.pts, reference); // time in seconds

      info.dtsTime = info.dts / ONE_SECOND_IN_TS;
      info.ptsTime = info.pts / ONE_SECOND_IN_TS;
    });
  };

  if (segmentInfo.audio && segmentInfo.audio.length) {
    var audioBaseTimestamp = baseTimestamp; // fall back to the first audio dts when no usable base was provided

    if (typeof audioBaseTimestamp === 'undefined' || isNaN(audioBaseTimestamp)) {
      audioBaseTimestamp = segmentInfo.audio[0].dts;
    }

    adjustEntries(segmentInfo.audio, audioBaseTimestamp);
  }

  if (segmentInfo.video && segmentInfo.video.length) {
    var videoBaseTimestamp = baseTimestamp; // fall back to the first video dts when no usable base was provided

    if (typeof videoBaseTimestamp === 'undefined' || isNaN(videoBaseTimestamp)) {
      videoBaseTimestamp = segmentInfo.video[0].dts;
    }

    adjustEntries(segmentInfo.video, videoBaseTimestamp);

    if (segmentInfo.firstKeyFrame) {
      adjustEntries([segmentInfo.firstKeyFrame], videoBaseTimestamp);
    }
  }
};
18152 /**
18153 * inspects the aac data stream for start and end time information
18154 */
18155
18156
// Walk an AAC (ADTS/ID3) stream counting ADTS frames and extracting the
// sample rate and first ID3 timestamp, then synthesize start/end audio
// timing entries. Returns null when either value cannot be determined.
var inspectAac_ = function inspectAac_(bytes) {
  var endLoop = false,
      audioCount = 0,
      sampleRate = null,
      timestamp = null,
      frameSize = 0,
      byteIndex = 0,
      packet;

  while (bytes.length - byteIndex >= 3) {
    var type = probe.aac.parseType(bytes, byteIndex);

    switch (type) {
      case 'timed-metadata':
        // Exit early because we don't have enough to parse
        // the ID3 tag header
        if (bytes.length - byteIndex < 10) {
          endLoop = true;
          break;
        }

        frameSize = probe.aac.parseId3TagSize(bytes, byteIndex); // Exit early if we don't have enough in the buffer
        // to emit a full packet

        if (frameSize > bytes.length) {
          endLoop = true;
          break;
        }

        // only the first ID3 timestamp is needed
        if (timestamp === null) {
          packet = bytes.subarray(byteIndex, byteIndex + frameSize);
          timestamp = probe.aac.parseAacTimestamp(packet);
        }

        byteIndex += frameSize;
        break;

      case 'audio':
        // Exit early because we don't have enough to parse
        // the ADTS frame header
        if (bytes.length - byteIndex < 7) {
          endLoop = true;
          break;
        }

        frameSize = probe.aac.parseAdtsSize(bytes, byteIndex); // Exit early if we don't have enough in the buffer
        // to emit a full packet

        if (frameSize > bytes.length) {
          endLoop = true;
          break;
        }

        // only the first frame's sample rate is needed
        if (sampleRate === null) {
          packet = bytes.subarray(byteIndex, byteIndex + frameSize);
          sampleRate = probe.aac.parseSampleRate(packet);
        }

        audioCount++;
        byteIndex += frameSize;
        break;

      default:
        // not at a recognizable boundary; resync one byte at a time
        byteIndex++;
        break;
    }

    if (endLoop) {
      return null;
    }
  }

  if (sampleRate === null || timestamp === null) {
    return null;
  }

  // 90kHz ticks per audio sample; each AAC frame carries 1024 samples
  var audioTimescale = ONE_SECOND_IN_TS / sampleRate;
  var result = {
    audio: [{
      type: 'audio',
      dts: timestamp,
      pts: timestamp
    }, {
      type: 'audio',
      dts: timestamp + audioCount * 1024 * audioTimescale,
      pts: timestamp + audioCount * 1024 * audioTimescale
    }]
  };
  return result;
};
18247 /**
18248 * inspects the transport stream segment data for start and end time information
18249 * of the audio and video tracks (when present) as well as the first key frame's
18250 * start time.
18251 */
18252
18253
/**
 * inspects the transport stream segment data for start and end time information
 * of the audio and video tracks (when present) as well as the first key frame's
 * start time.
 */
var inspectTs_ = function inspectTs_(bytes) {
  var pmt = {
    pid: null,
    table: null
  };
  var result = {};
  parsePsi_(bytes, pmt); // inspect each stream type registered in the program map table

  Object.keys(pmt.table || {}).forEach(function (pid) {
    var streamType = pmt.table[pid];

    if (streamType === streamTypes.H264_STREAM_TYPE) {
      result.video = [];
      parseVideoPes_(bytes, pmt, result); // drop the property entirely when no timing info was found

      if (result.video.length === 0) {
        delete result.video;
      }
    } else if (streamType === streamTypes.ADTS_STREAM_TYPE) {
      result.audio = [];
      parseAudioPes_(bytes, pmt, result); // drop the property entirely when no timing info was found

      if (result.audio.length === 0) {
        delete result.audio;
      }
    }
  });
  return result;
};
18292 /**
18293 * Inspects segment byte data and returns an object with start and end timing information
18294 *
18295 * @param {Uint8Array} bytes The segment byte data
18296 * @param {Number} baseTimestamp Relative reference timestamp used when adjusting frame
18297 * timestamps for rollover. This value must be in 90khz clock.
18298 * @return {Object} Object containing start and end frame timing info of segment.
18299 */
18300
18301
var inspect = function inspect(bytes, baseTimestamp) {
  // choose the parser based on whether the bytes look like raw AAC or TS
  var result = probe.aac.isLikelyAacData(bytes) ? inspectAac_(bytes) : inspectTs_(bytes); // nothing usable was found

  if (!result || !result.audio && !result.video) {
    return null;
  }

  // apply rollover handling and second-based times in place
  adjustTimestamp_(result, baseTimestamp);
  return result;
};
18319
// Public interface of the TS/AAC inspector. parseAudioPes_ is also
// exported — presumably for unit testing; confirm against callers.
var tsInspector = {
  inspect: inspect,
  parseAudioPes_: parseAudioPes_
};
18324 /* global self */
18325
18326 /**
18327 * Re-emits transmuxer events by converting them into messages to the
18328 * world outside the worker.
18329 *
18330 * @param {Object} transmuxer the transmuxer to wire events on
18331 * @private
18332 */
18333
var wireTransmuxerEvents = function wireTransmuxerEvents(self, transmuxer) {
  transmuxer.on('data', function (segment) {
    // transfer ownership of the underlying ArrayBuffer
    // instead of doing a copy to save memory
    // ArrayBuffers are transferable but generic TypedArrays are not
    // @link https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Using_web_workers#Passing_data_by_transferring_ownership_(transferable_objects)
    var initArray = segment.initSegment;
    segment.initSegment = {
      data: initArray.buffer,
      byteOffset: initArray.byteOffset,
      byteLength: initArray.byteLength
    };
    var typedArray = segment.data;
    segment.data = typedArray.buffer;
    self.postMessage({
      action: 'data',
      segment: segment,
      byteOffset: typedArray.byteOffset,
      byteLength: typedArray.byteLength
    }, [segment.data]);
  });
  transmuxer.on('done', function (data) {
    self.postMessage({
      action: 'done'
    });
  });
  transmuxer.on('gopInfo', function (gopInfo) {
    self.postMessage({
      action: 'gopInfo',
      gopInfo: gopInfo
    });
  });
  transmuxer.on('videoSegmentTimingInfo', function (timingInfo) {
    // convert 90kHz clock values into seconds before posting
    var videoSegmentTimingInfo = {
      start: {
        decode: clock.videoTsToSeconds(timingInfo.start.dts),
        presentation: clock.videoTsToSeconds(timingInfo.start.pts)
      },
      end: {
        decode: clock.videoTsToSeconds(timingInfo.end.dts),
        presentation: clock.videoTsToSeconds(timingInfo.end.pts)
      },
      baseMediaDecodeTime: clock.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
    };

    if (timingInfo.prependedContentDuration) {
      videoSegmentTimingInfo.prependedContentDuration = clock.videoTsToSeconds(timingInfo.prependedContentDuration);
    }

    self.postMessage({
      action: 'videoSegmentTimingInfo',
      videoSegmentTimingInfo: videoSegmentTimingInfo
    });
  });
  transmuxer.on('audioSegmentTimingInfo', function (timingInfo) {
    // Note that all times for [audio/video]SegmentTimingInfo events are in video clock
    var audioSegmentTimingInfo = {
      start: {
        decode: clock.videoTsToSeconds(timingInfo.start.dts),
        presentation: clock.videoTsToSeconds(timingInfo.start.pts)
      },
      end: {
        decode: clock.videoTsToSeconds(timingInfo.end.dts),
        presentation: clock.videoTsToSeconds(timingInfo.end.pts)
      },
      baseMediaDecodeTime: clock.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
    };

    if (timingInfo.prependedContentDuration) {
      audioSegmentTimingInfo.prependedContentDuration = clock.videoTsToSeconds(timingInfo.prependedContentDuration);
    }

    self.postMessage({
      action: 'audioSegmentTimingInfo',
      audioSegmentTimingInfo: audioSegmentTimingInfo
    });
  });
  transmuxer.on('id3Frame', function (id3Frame) {
    self.postMessage({
      action: 'id3Frame',
      id3Frame: id3Frame
    });
  });
  transmuxer.on('caption', function (caption) {
    self.postMessage({
      action: 'caption',
      caption: caption
    });
  });
  transmuxer.on('trackinfo', function (trackInfo) {
    self.postMessage({
      action: 'trackinfo',
      trackInfo: trackInfo
    });
  });
  transmuxer.on('audioTimingInfo', function (audioTimingInfo) {
    // convert to video TS since we prioritize video time over audio
    self.postMessage({
      action: 'audioTimingInfo',
      audioTimingInfo: {
        start: clock.videoTsToSeconds(audioTimingInfo.start),
        end: clock.videoTsToSeconds(audioTimingInfo.end)
      }
    });
  });
  transmuxer.on('videoTimingInfo', function (videoTimingInfo) {
    self.postMessage({
      action: 'videoTimingInfo',
      videoTimingInfo: {
        start: clock.videoTsToSeconds(videoTimingInfo.start),
        end: clock.videoTsToSeconds(videoTimingInfo.end)
      }
    });
  });
  transmuxer.on('log', function (log) {
    self.postMessage({
      action: 'log',
      log: log
    });
  });
};
18455 /**
18456 * All incoming messages route through this hash. If no function exists
18457 * to handle an incoming message, then we ignore the message.
18458 *
18459 * @class MessageHandlers
18460 * @param {Object} options the options to initialize with
18461 */
18462
18463
  var MessageHandlers = /*#__PURE__*/function () {
    function MessageHandlers(self, options) {
      // options are forwarded verbatim to the mux.js Transmuxer
      this.options = options || {};
      // `self` is the worker's global scope; used as the postMessage target
      this.self = self;
      this.init();
    }
    /**
     * initialize our web worker and wire all the events.
     */


    var _proto = MessageHandlers.prototype;

    _proto.init = function init() {
      // re-init disposes any previous transmuxer before creating a new one
      if (this.transmuxer) {
        this.transmuxer.dispose();
      }

      this.transmuxer = new transmuxer.Transmuxer(this.options);
      wireTransmuxerEvents(this.self, this.transmuxer);
    }
    /**
     * Parse CEA-608/708 captions out of an mp4 segment and post the result
     * back as an 'mp4Captions' message. The segment's buffer is handed back
     * to the caller as a Transferable.
     *
     * @param {Object} data - message payload with `data`, `byteOffset`,
     *        `byteLength`, `trackIds` and `timescales`
     */
    ;

    _proto.pushMp4Captions = function pushMp4Captions(data) {
      // the caption parser is created lazily on first use
      if (!this.captionParser) {
        this.captionParser = new captionParser();
        this.captionParser.init();
      }

      var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
      var parsed = this.captionParser.parse(segment, data.trackIds, data.timescales);
      this.self.postMessage({
        action: 'mp4Captions',
        captions: parsed && parsed.captions || [],
        logs: parsed && parsed.logs || [],
        data: segment.buffer
      }, [segment.buffer]);
    }
    /**
     * Probe an mp4 segment for its start time and post it back as a
     * 'probeMp4StartTime' message, transferring the bytes back to the caller.
     */
    ;

    _proto.probeMp4StartTime = function probeMp4StartTime(_ref) {
      var timescales = _ref.timescales,
          data = _ref.data;
      var startTime = probe$2.startTime(timescales, data);
      this.self.postMessage({
        action: 'probeMp4StartTime',
        startTime: startTime,
        data: data
      }, [data.buffer]);
    }
    /**
     * Probe an mp4 init segment for its track list and post it back as a
     * 'probeMp4Tracks' message, transferring the bytes back to the caller.
     */
    ;

    _proto.probeMp4Tracks = function probeMp4Tracks(_ref2) {
      var data = _ref2.data;
      var tracks = probe$2.tracks(data);
      this.self.postMessage({
        action: 'probeMp4Tracks',
        tracks: tracks,
        data: data
      }, [data.buffer]);
    }
    /**
     * Probe an mpeg2-ts segment to determine the start time of the segment in it's
     * internal "media time," as well as whether it contains video and/or audio.
     *
     * @private
     * @param {Uint8Array} bytes - segment bytes
     * @param {number} baseStartTime
     *        Relative reference timestamp used when adjusting frame timestamps for rollover.
     *        This value should be in seconds, as it's converted to a 90khz clock within the
     *        function body.
     * @return {Object} The start time of the current segment in "media time" as well as
     *         whether it contains video and/or audio
     */
    ;

    _proto.probeTs = function probeTs(_ref3) {
      var data = _ref3.data,
          baseStartTime = _ref3.baseStartTime;
      // seconds -> 90khz ticks; undefined when no usable baseStartTime given
      var tsStartTime = typeof baseStartTime === 'number' && !isNaN(baseStartTime) ? baseStartTime * clock.ONE_SECOND_IN_TS : void 0;
      var timeInfo = tsInspector.inspect(data, tsStartTime);
      // result stays null when the inspector can't parse the bytes
      var result = null;

      if (timeInfo) {
        result = {
          // each type's time info comes back as an array of 2 times, start and end
          hasVideo: timeInfo.video && timeInfo.video.length === 2 || false,
          hasAudio: timeInfo.audio && timeInfo.audio.length === 2 || false
        };

        if (result.hasVideo) {
          result.videoStart = timeInfo.video[0].ptsTime;
        }

        if (result.hasAudio) {
          result.audioStart = timeInfo.audio[0].ptsTime;
        }
      }

      this.self.postMessage({
        action: 'probeTs',
        result: result,
        data: data
      }, [data.buffer]);
    };

    // Drop all caption state (parsed and partial) if a parser exists.
    _proto.clearAllMp4Captions = function clearAllMp4Captions() {
      if (this.captionParser) {
        this.captionParser.clearAllCaptions();
      }
    };

    // Drop only already-parsed captions if a parser exists.
    _proto.clearParsedMp4Captions = function clearParsedMp4Captions() {
      if (this.captionParser) {
        this.captionParser.clearParsedCaptions();
      }
    }
    /**
     * Adds data (a ts segment) to the start of the transmuxer pipeline for
     * processing.
     *
     * @param {ArrayBuffer} data data to push into the muxer
     */
    ;

    _proto.push = function push(data) {
      // Cast array buffer to correct type for transmuxer
      var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
      this.transmuxer.push(segment);
    }
    /**
     * Recreate the transmuxer so that the next segment added via `push`
     * start with a fresh transmuxer.
     */
    ;

    _proto.reset = function reset() {
      this.transmuxer.reset();
    }
    /**
     * Set the value that will be used as the `baseMediaDecodeTime` time for the
     * next segment pushed in. Subsequent segments will have their `baseMediaDecodeTime`
     * set relative to the first based on the PTS values.
     *
     * @param {Object} data used to set the timestamp offset in the muxer
     */
    ;

    _proto.setTimestampOffset = function setTimestampOffset(data) {
      var timestampOffset = data.timestampOffset || 0;
      // seconds -> 90khz ticks, rounded to an integral tick count
      this.transmuxer.setBaseMediaDecodeTime(Math.round(clock.secondsToVideoTs(timestampOffset)));
    };

    _proto.setAudioAppendStart = function setAudioAppendStart(data) {
      // seconds -> 90khz ticks; ceil presumably to avoid overlapping the
      // previous append -- TODO confirm against mux.js docs
      this.transmuxer.setAudioAppendStart(Math.ceil(clock.secondsToVideoTs(data.appendStart)));
    };

    _proto.setRemux = function setRemux(data) {
      this.transmuxer.setRemux(data.remux);
    }
    /**
     * Forces the pipeline to finish processing the last segment and emit it's
     * results.
     *
     * @param {Object} data event data, not really used
     */
    ;

    _proto.flush = function flush(data) {
      this.transmuxer.flush(); // transmuxed done action is fired after both audio/video pipelines are flushed

      self.postMessage({
        action: 'done',
        type: 'transmuxed'
      });
    };

    _proto.endTimeline = function endTimeline() {
      this.transmuxer.endTimeline(); // transmuxed endedtimeline action is fired after both audio/video pipelines end their
      // timelines

      self.postMessage({
        action: 'endedtimeline',
        type: 'transmuxed'
      });
    };

    _proto.alignGopsWith = function alignGopsWith(data) {
      // copy the array so the worker owns its own list
      this.transmuxer.alignGopsWith(data.gopsToAlignWith.slice());
    };

    return MessageHandlers;
  }();
18654 /**
18655 * Our web worker interface so that things can talk to mux.js
18656 * that will be running in a web worker. the scope is passed to this by
18657 * webworkify.
18658 *
18659 * @param {Object} self the scope for the web worker
18660 */
18661
18662
18663 self.onmessage = function (event) {
18664 if (event.data.action === 'init' && event.data.options) {
18665 this.messageHandlers = new MessageHandlers(self, event.data.options);
18666 return;
18667 }
18668
18669 if (!this.messageHandlers) {
18670 this.messageHandlers = new MessageHandlers(self);
18671 }
18672
18673 if (event.data && event.data.action && event.data.action !== 'init') {
18674 if (this.messageHandlers[event.data.action]) {
18675 this.messageHandlers[event.data.action](event.data);
18676 }
18677 }
18678 };
18679 }));
  // Constructor for the transmux web worker, built from the inlined worker
  // source (workerCode$1) by the rollup worker factory.
  var TransmuxWorker = factory(workerCode$1);
  /* rollup-plugin-worker-factory end for worker!/Users/bclifford/Code/vhs-release-test/src/transmuxer-worker.js */
18682
18683 var handleData_ = function handleData_(event, transmuxedData, callback) {
18684 var _event$data$segment = event.data.segment,
18685 type = _event$data$segment.type,
18686 initSegment = _event$data$segment.initSegment,
18687 captions = _event$data$segment.captions,
18688 captionStreams = _event$data$segment.captionStreams,
18689 metadata = _event$data$segment.metadata,
18690 videoFrameDtsTime = _event$data$segment.videoFrameDtsTime,
18691 videoFramePtsTime = _event$data$segment.videoFramePtsTime;
18692 transmuxedData.buffer.push({
18693 captions: captions,
18694 captionStreams: captionStreams,
18695 metadata: metadata
18696 });
18697 var boxes = event.data.segment.boxes || {
18698 data: event.data.segment.data
18699 };
18700 var result = {
18701 type: type,
18702 // cast ArrayBuffer to TypedArray
18703 data: new Uint8Array(boxes.data, boxes.data.byteOffset, boxes.data.byteLength),
18704 initSegment: new Uint8Array(initSegment.data, initSegment.byteOffset, initSegment.byteLength)
18705 };
18706
18707 if (typeof videoFrameDtsTime !== 'undefined') {
18708 result.videoFrameDtsTime = videoFrameDtsTime;
18709 }
18710
18711 if (typeof videoFramePtsTime !== 'undefined') {
18712 result.videoFramePtsTime = videoFramePtsTime;
18713 }
18714
18715 callback(result);
18716 };
18717 var handleDone_ = function handleDone_(_ref) {
18718 var transmuxedData = _ref.transmuxedData,
18719 callback = _ref.callback;
18720 // Previously we only returned data on data events,
18721 // not on done events. Clear out the buffer to keep that consistent.
18722 transmuxedData.buffer = []; // all buffers should have been flushed from the muxer, so start processing anything we
18723 // have received
18724
18725 callback(transmuxedData);
18726 };
18727 var handleGopInfo_ = function handleGopInfo_(event, transmuxedData) {
18728 transmuxedData.gopInfo = event.data.gopInfo;
18729 };
  /**
   * Run one transmux job against the shared transmux worker: install a
   * message handler scoped to this job, forward each worker event to the
   * matching callback from `options`, then post configuration, segment
   * bytes (as a Transferable) and a final 'flush' to the worker. The job
   * completes when the worker reports `type: 'transmuxed'` (and, when
   * `isEndOfTimeline` is set, after the 'endedtimeline' event), at which
   * point `onDone` fires and the next queued job is dequeued.
   *
   * @param {Object} options - the job: the worker, the bytes, and all
   *        per-event callbacks (onData, onTrackInfo, onDone, ...)
   */
  var processTransmux = function processTransmux(options) {
    var transmuxer = options.transmuxer,
        bytes = options.bytes,
        audioAppendStart = options.audioAppendStart,
        gopsToAlignWith = options.gopsToAlignWith,
        remux = options.remux,
        onData = options.onData,
        onTrackInfo = options.onTrackInfo,
        onAudioTimingInfo = options.onAudioTimingInfo,
        onVideoTimingInfo = options.onVideoTimingInfo,
        onVideoSegmentTimingInfo = options.onVideoSegmentTimingInfo,
        onAudioSegmentTimingInfo = options.onAudioSegmentTimingInfo,
        onId3 = options.onId3,
        onCaptions = options.onCaptions,
        onDone = options.onDone,
        onEndedTimeline = options.onEndedTimeline,
        onTransmuxerLog = options.onTransmuxerLog,
        isEndOfTimeline = options.isEndOfTimeline;
    // per-job accumulator for caption/metadata/gop info
    var transmuxedData = {
      buffer: []
    };
    // when this segment ends a timeline, completion additionally requires
    // the worker's 'endedtimeline' event before 'done' may fire
    var waitForEndedTimelineEvent = isEndOfTimeline;

    var handleMessage = function handleMessage(event) {
      // ignore stale messages once another job (or a dispose) took over
      if (transmuxer.currentTransmux !== options) {
        // disposed
        return;
      }

      if (event.data.action === 'data') {
        handleData_(event, transmuxedData, onData);
      }

      if (event.data.action === 'trackinfo') {
        onTrackInfo(event.data.trackInfo);
      }

      if (event.data.action === 'gopInfo') {
        handleGopInfo_(event, transmuxedData);
      }

      if (event.data.action === 'audioTimingInfo') {
        onAudioTimingInfo(event.data.audioTimingInfo);
      }

      if (event.data.action === 'videoTimingInfo') {
        onVideoTimingInfo(event.data.videoTimingInfo);
      }

      if (event.data.action === 'videoSegmentTimingInfo') {
        onVideoSegmentTimingInfo(event.data.videoSegmentTimingInfo);
      }

      if (event.data.action === 'audioSegmentTimingInfo') {
        onAudioSegmentTimingInfo(event.data.audioSegmentTimingInfo);
      }

      if (event.data.action === 'id3Frame') {
        onId3([event.data.id3Frame], event.data.id3Frame.dispatchType);
      }

      if (event.data.action === 'caption') {
        onCaptions(event.data.caption);
      }

      if (event.data.action === 'endedtimeline') {
        waitForEndedTimelineEvent = false;
        onEndedTimeline();
      }

      if (event.data.action === 'log') {
        onTransmuxerLog(event.data.log);
      } // wait for the transmuxed event since we may have audio and video


      if (event.data.type !== 'transmuxed') {
        return;
      } // If the "endedtimeline" event has not yet fired, and this segment represents the end
      // of a timeline, that means there may still be data events before the segment
      // processing can be considerred complete. In that case, the final event should be
      // an "endedtimeline" event with the type "transmuxed."


      if (waitForEndedTimelineEvent) {
        return;
      }

      // job complete: detach, report, and start the next queued job
      transmuxer.onmessage = null;
      handleDone_({
        transmuxedData: transmuxedData,
        callback: onDone
      });
      /* eslint-disable no-use-before-define */

      dequeue(transmuxer);
      /* eslint-enable */
    };

    transmuxer.onmessage = handleMessage;

    if (audioAppendStart) {
      transmuxer.postMessage({
        action: 'setAudioAppendStart',
        appendStart: audioAppendStart
      });
    } // allow empty arrays to be passed to clear out GOPs


    if (Array.isArray(gopsToAlignWith)) {
      transmuxer.postMessage({
        action: 'alignGopsWith',
        gopsToAlignWith: gopsToAlignWith
      });
    }

    if (typeof remux !== 'undefined') {
      transmuxer.postMessage({
        action: 'setRemux',
        remux: remux
      });
    }

    if (bytes.byteLength) {
      var buffer = bytes instanceof ArrayBuffer ? bytes : bytes.buffer;
      var byteOffset = bytes instanceof ArrayBuffer ? 0 : bytes.byteOffset;
      transmuxer.postMessage({
        action: 'push',
        // Send the typed-array of data as an ArrayBuffer so that
        // it can be sent as a "Transferable" and avoid the costly
        // memory copy
        data: buffer,
        // To recreate the original typed-array, we need information
        // about what portion of the ArrayBuffer it was a view into
        byteOffset: byteOffset,
        byteLength: bytes.byteLength
      }, [buffer]);
    }

    if (isEndOfTimeline) {
      transmuxer.postMessage({
        action: 'endTimeline'
      });
    } // even if we didn't push any bytes, we have to make sure we flush in case we reached
    // the end of the segment


    transmuxer.postMessage({
      action: 'flush'
    });
  };
18880 var dequeue = function dequeue(transmuxer) {
18881 transmuxer.currentTransmux = null;
18882
18883 if (transmuxer.transmuxQueue.length) {
18884 transmuxer.currentTransmux = transmuxer.transmuxQueue.shift();
18885
18886 if (typeof transmuxer.currentTransmux === 'function') {
18887 transmuxer.currentTransmux();
18888 } else {
18889 processTransmux(transmuxer.currentTransmux);
18890 }
18891 }
18892 };
18893 var processAction = function processAction(transmuxer, action) {
18894 transmuxer.postMessage({
18895 action: action
18896 });
18897 dequeue(transmuxer);
18898 };
18899 var enqueueAction = function enqueueAction(action, transmuxer) {
18900 if (!transmuxer.currentTransmux) {
18901 transmuxer.currentTransmux = action;
18902 processAction(transmuxer, action);
18903 return;
18904 }
18905
18906 transmuxer.transmuxQueue.push(processAction.bind(null, transmuxer, action));
18907 };
18908 var reset = function reset(transmuxer) {
18909 enqueueAction('reset', transmuxer);
18910 };
18911 var endTimeline = function endTimeline(transmuxer) {
18912 enqueueAction('endTimeline', transmuxer);
18913 };
18914 var transmux = function transmux(options) {
18915 if (!options.transmuxer.currentTransmux) {
18916 options.transmuxer.currentTransmux = options;
18917 processTransmux(options);
18918 return;
18919 }
18920
18921 options.transmuxer.transmuxQueue.push(options);
18922 };
18923 var createTransmuxer = function createTransmuxer(options) {
18924 var transmuxer = new TransmuxWorker();
18925 transmuxer.currentTransmux = null;
18926 transmuxer.transmuxQueue = [];
18927 var term = transmuxer.terminate;
18928
18929 transmuxer.terminate = function () {
18930 transmuxer.currentTransmux = null;
18931 transmuxer.transmuxQueue.length = 0;
18932 return term.call(transmuxer);
18933 };
18934
18935 transmuxer.postMessage({
18936 action: 'init',
18937 options: options
18938 });
18939 return transmuxer;
18940 };
  // Public facade over the queued transmux-worker helpers defined above.
  var segmentTransmuxer = {
    reset: reset,
    endTimeline: endTimeline,
    transmux: transmux,
    createTransmuxer: createTransmuxer
  };
18947
18948 var workerCallback = function workerCallback(options) {
18949 var transmuxer = options.transmuxer;
18950 var endAction = options.endAction || options.action;
18951 var callback = options.callback;
18952
18953 var message = _extends_1({}, options, {
18954 endAction: null,
18955 transmuxer: null,
18956 callback: null
18957 });
18958
18959 var listenForEndEvent = function listenForEndEvent(event) {
18960 if (event.data.action !== endAction) {
18961 return;
18962 }
18963
18964 transmuxer.removeEventListener('message', listenForEndEvent); // transfer ownership of bytes back to us.
18965
18966 if (event.data.data) {
18967 event.data.data = new Uint8Array(event.data.data, options.byteOffset || 0, options.byteLength || event.data.data.byteLength);
18968
18969 if (options.data) {
18970 options.data = event.data.data;
18971 }
18972 }
18973
18974 callback(event.data);
18975 };
18976
18977 transmuxer.addEventListener('message', listenForEndEvent);
18978
18979 if (options.data) {
18980 var isArrayBuffer = options.data instanceof ArrayBuffer;
18981 message.byteOffset = isArrayBuffer ? 0 : options.data.byteOffset;
18982 message.byteLength = options.data.byteLength;
18983 var transfers = [isArrayBuffer ? options.data : options.data.buffer];
18984 transmuxer.postMessage(message, transfers);
18985 } else {
18986 transmuxer.postMessage(message);
18987 }
18988 };
18989
  // Error codes attached to failed segment/key requests so callers can
  // distinguish hard failures from timeouts and deliberate aborts.
  var REQUEST_ERRORS = {
    FAILURE: 2,
    TIMEOUT: -101,
    ABORTED: -102
  };
18995 /**
18996 * Abort all requests
18997 *
18998 * @param {Object} activeXhrs - an object that tracks all XHR requests
18999 */
19000
19001 var abortAll = function abortAll(activeXhrs) {
19002 activeXhrs.forEach(function (xhr) {
19003 xhr.abort();
19004 });
19005 };
19006 /**
19007 * Gather important bandwidth stats once a request has completed
19008 *
19009 * @param {Object} request - the XHR request from which to gather stats
19010 */
19011
19012
19013 var getRequestStats = function getRequestStats(request) {
19014 return {
19015 bandwidth: request.bandwidth,
19016 bytesReceived: request.bytesReceived || 0,
19017 roundTripTime: request.roundTripTime || 0
19018 };
19019 };
19020 /**
19021 * If possible gather bandwidth stats as a request is in
19022 * progress
19023 *
19024 * @param {Event} progressEvent - an event object from an XHR's progress event
19025 */
19026
19027
19028 var getProgressStats = function getProgressStats(progressEvent) {
19029 var request = progressEvent.target;
19030 var roundTripTime = Date.now() - request.requestTime;
19031 var stats = {
19032 bandwidth: Infinity,
19033 bytesReceived: 0,
19034 roundTripTime: roundTripTime || 0
19035 };
19036 stats.bytesReceived = progressEvent.loaded; // This can result in Infinity if stats.roundTripTime is 0 but that is ok
19037 // because we should only use bandwidth stats on progress to determine when
19038 // abort a request early due to insufficient bandwidth
19039
19040 stats.bandwidth = Math.floor(stats.bytesReceived / stats.roundTripTime * 8 * 1000);
19041 return stats;
19042 };
19043 /**
19044 * Handle all error conditions in one place and return an object
19045 * with all the information
19046 *
19047 * @param {Error|null} error - if non-null signals an error occured with the XHR
19048 * @param {Object} request - the XHR request that possibly generated the error
19049 */
19050
19051
19052 var handleErrors = function handleErrors(error, request) {
19053 if (request.timedout) {
19054 return {
19055 status: request.status,
19056 message: 'HLS request timed-out at URL: ' + request.uri,
19057 code: REQUEST_ERRORS.TIMEOUT,
19058 xhr: request
19059 };
19060 }
19061
19062 if (request.aborted) {
19063 return {
19064 status: request.status,
19065 message: 'HLS request aborted at URL: ' + request.uri,
19066 code: REQUEST_ERRORS.ABORTED,
19067 xhr: request
19068 };
19069 }
19070
19071 if (error) {
19072 return {
19073 status: request.status,
19074 message: 'HLS request errored at URL: ' + request.uri,
19075 code: REQUEST_ERRORS.FAILURE,
19076 xhr: request
19077 };
19078 }
19079
19080 if (request.responseType === 'arraybuffer' && request.response.byteLength === 0) {
19081 return {
19082 status: request.status,
19083 message: 'Empty HLS response at URL: ' + request.uri,
19084 code: REQUEST_ERRORS.FAILURE,
19085 xhr: request
19086 };
19087 }
19088
19089 return null;
19090 };
19091 /**
19092 * Handle responses for key data and convert the key data to the correct format
19093 * for the decryption step later
19094 *
19095 * @param {Object} segment - a simplified copy of the segmentInfo object
19096 * from SegmentLoader
19097 * @param {Array} objects - objects to add the key bytes to.
19098 * @param {Function} finishProcessingFn - a callback to execute to continue processing
19099 * this request
19100 */
19101
19102
19103 var handleKeyResponse = function handleKeyResponse(segment, objects, finishProcessingFn) {
19104 return function (error, request) {
19105 var response = request.response;
19106 var errorObj = handleErrors(error, request);
19107
19108 if (errorObj) {
19109 return finishProcessingFn(errorObj, segment);
19110 }
19111
19112 if (response.byteLength !== 16) {
19113 return finishProcessingFn({
19114 status: request.status,
19115 message: 'Invalid HLS key at URL: ' + request.uri,
19116 code: REQUEST_ERRORS.FAILURE,
19117 xhr: request
19118 }, segment);
19119 }
19120
19121 var view = new DataView(response);
19122 var bytes = new Uint32Array([view.getUint32(0), view.getUint32(4), view.getUint32(8), view.getUint32(12)]);
19123
19124 for (var i = 0; i < objects.length; i++) {
19125 objects[i].bytes = bytes;
19126 }
19127
19128 return finishProcessingFn(null, segment);
19129 };
19130 };
19131
19132 var parseInitSegment = function parseInitSegment(segment, _callback) {
19133 var type = detectContainerForBytes(segment.map.bytes); // TODO: We should also handle ts init segments here, but we
19134 // only know how to parse mp4 init segments at the moment
19135
19136 if (type !== 'mp4') {
19137 var uri = segment.map.resolvedUri || segment.map.uri;
19138 return _callback({
19139 internal: true,
19140 message: "Found unsupported " + (type || 'unknown') + " container for initialization segment at URL: " + uri,
19141 code: REQUEST_ERRORS.FAILURE
19142 });
19143 }
19144
19145 workerCallback({
19146 action: 'probeMp4Tracks',
19147 data: segment.map.bytes,
19148 transmuxer: segment.transmuxer,
19149 callback: function callback(_ref) {
19150 var tracks = _ref.tracks,
19151 data = _ref.data;
19152 // transfer bytes back to us
19153 segment.map.bytes = data;
19154 tracks.forEach(function (track) {
19155 segment.map.tracks = segment.map.tracks || {}; // only support one track of each type for now
19156
19157 if (segment.map.tracks[track.type]) {
19158 return;
19159 }
19160
19161 segment.map.tracks[track.type] = track;
19162
19163 if (typeof track.id === 'number' && track.timescale) {
19164 segment.map.timescales = segment.map.timescales || {};
19165 segment.map.timescales[track.id] = track.timescale;
19166 }
19167 });
19168 return _callback(null);
19169 }
19170 });
19171 };
19172 /**
19173 * Handle init-segment responses
19174 *
19175 * @param {Object} segment - a simplified copy of the segmentInfo object
19176 * from SegmentLoader
19177 * @param {Function} finishProcessingFn - a callback to execute to continue processing
19178 * this request
19179 */
19180
19181
19182 var handleInitSegmentResponse = function handleInitSegmentResponse(_ref2) {
19183 var segment = _ref2.segment,
19184 finishProcessingFn = _ref2.finishProcessingFn;
19185 return function (error, request) {
19186 var errorObj = handleErrors(error, request);
19187
19188 if (errorObj) {
19189 return finishProcessingFn(errorObj, segment);
19190 }
19191
19192 var bytes = new Uint8Array(request.response); // init segment is encypted, we will have to wait
19193 // until the key request is done to decrypt.
19194
19195 if (segment.map.key) {
19196 segment.map.encryptedBytes = bytes;
19197 return finishProcessingFn(null, segment);
19198 }
19199
19200 segment.map.bytes = bytes;
19201 parseInitSegment(segment, function (parseError) {
19202 if (parseError) {
19203 parseError.xhr = request;
19204 parseError.status = request.status;
19205 return finishProcessingFn(parseError, segment);
19206 }
19207
19208 finishProcessingFn(null, segment);
19209 });
19210 };
19211 };
19212 /**
19213 * Response handler for segment-requests being sure to set the correct
19214 * property depending on whether the segment is encryped or not
19215 * Also records and keeps track of stats that are used for ABR purposes
19216 *
19217 * @param {Object} segment - a simplified copy of the segmentInfo object
19218 * from SegmentLoader
19219 * @param {Function} finishProcessingFn - a callback to execute to continue processing
19220 * this request
19221 */
19222
19223
19224 var handleSegmentResponse = function handleSegmentResponse(_ref3) {
19225 var segment = _ref3.segment,
19226 finishProcessingFn = _ref3.finishProcessingFn,
19227 responseType = _ref3.responseType;
19228 return function (error, request) {
19229 var errorObj = handleErrors(error, request);
19230
19231 if (errorObj) {
19232 return finishProcessingFn(errorObj, segment);
19233 }
19234
19235 var newBytes = // although responseText "should" exist, this guard serves to prevent an error being
19236 // thrown for two primary cases:
19237 // 1. the mime type override stops working, or is not implemented for a specific
19238 // browser
19239 // 2. when using mock XHR libraries like sinon that do not allow the override behavior
19240 responseType === 'arraybuffer' || !request.responseText ? request.response : stringToArrayBuffer(request.responseText.substring(segment.lastReachedChar || 0));
19241 segment.stats = getRequestStats(request);
19242
19243 if (segment.key) {
19244 segment.encryptedBytes = new Uint8Array(newBytes);
19245 } else {
19246 segment.bytes = new Uint8Array(newBytes);
19247 }
19248
19249 return finishProcessingFn(null, segment);
19250 };
19251 };
19252
  /**
   * Probe the ts bytes for track info and start times, report them via the
   * supplied callbacks, then transmux the bytes, forwarding every worker
   * event (data, timing, id3, captions, logs, done) to the matching
   * callback. Start-time callbacks are nulled after first use so probe
   * results take precedence over transmuxer-reported start times.
   *
   * @param {Object} _ref4 - segment, bytes and the full set of notification
   *        callbacks (trackInfoFn, timingInfoFn, dataFn, doneFn, ...)
   */
  var transmuxAndNotify = function transmuxAndNotify(_ref4) {
    var segment = _ref4.segment,
        bytes = _ref4.bytes,
        trackInfoFn = _ref4.trackInfoFn,
        timingInfoFn = _ref4.timingInfoFn,
        videoSegmentTimingInfoFn = _ref4.videoSegmentTimingInfoFn,
        audioSegmentTimingInfoFn = _ref4.audioSegmentTimingInfoFn,
        id3Fn = _ref4.id3Fn,
        captionsFn = _ref4.captionsFn,
        isEndOfTimeline = _ref4.isEndOfTimeline,
        endedTimelineFn = _ref4.endedTimelineFn,
        dataFn = _ref4.dataFn,
        doneFn = _ref4.doneFn,
        onTransmuxerLog = _ref4.onTransmuxerLog;
    var fmp4Tracks = segment.map && segment.map.tracks || {};
    var isMuxed = Boolean(fmp4Tracks.audio && fmp4Tracks.video); // Keep references to each function so we can null them out after we're done with them.
    // One reason for this is that in the case of full segments, we want to trust start
    // times from the probe, rather than the transmuxer.

    var audioStartFn = timingInfoFn.bind(null, segment, 'audio', 'start');
    var audioEndFn = timingInfoFn.bind(null, segment, 'audio', 'end');
    var videoStartFn = timingInfoFn.bind(null, segment, 'video', 'start');
    var videoEndFn = timingInfoFn.bind(null, segment, 'video', 'end');

    // runs after the probe: performs the actual transmux and fans out
    // worker events to the notification callbacks
    var finish = function finish() {
      return transmux({
        bytes: bytes,
        transmuxer: segment.transmuxer,
        audioAppendStart: segment.audioAppendStart,
        gopsToAlignWith: segment.gopsToAlignWith,
        remux: isMuxed,
        onData: function onData(result) {
          // normalize 'combined' (muxed a/v) results to 'video'
          result.type = result.type === 'combined' ? 'video' : result.type;
          dataFn(segment, result);
        },
        onTrackInfo: function onTrackInfo(trackInfo) {
          if (trackInfoFn) {
            if (isMuxed) {
              trackInfo.isMuxed = true;
            }

            trackInfoFn(segment, trackInfo);
          }
        },
        onAudioTimingInfo: function onAudioTimingInfo(audioTimingInfo) {
          // we only want the first start value we encounter
          if (audioStartFn && typeof audioTimingInfo.start !== 'undefined') {
            audioStartFn(audioTimingInfo.start);
            audioStartFn = null;
          } // we want to continually update the end time


          if (audioEndFn && typeof audioTimingInfo.end !== 'undefined') {
            audioEndFn(audioTimingInfo.end);
          }
        },
        onVideoTimingInfo: function onVideoTimingInfo(videoTimingInfo) {
          // we only want the first start value we encounter
          if (videoStartFn && typeof videoTimingInfo.start !== 'undefined') {
            videoStartFn(videoTimingInfo.start);
            videoStartFn = null;
          } // we want to continually update the end time


          if (videoEndFn && typeof videoTimingInfo.end !== 'undefined') {
            videoEndFn(videoTimingInfo.end);
          }
        },
        onVideoSegmentTimingInfo: function onVideoSegmentTimingInfo(videoSegmentTimingInfo) {
          videoSegmentTimingInfoFn(videoSegmentTimingInfo);
        },
        onAudioSegmentTimingInfo: function onAudioSegmentTimingInfo(audioSegmentTimingInfo) {
          audioSegmentTimingInfoFn(audioSegmentTimingInfo);
        },
        onId3: function onId3(id3Frames, dispatchType) {
          id3Fn(segment, id3Frames, dispatchType);
        },
        onCaptions: function onCaptions(captions) {
          captionsFn(segment, [captions]);
        },
        isEndOfTimeline: isEndOfTimeline,
        onEndedTimeline: function onEndedTimeline() {
          endedTimelineFn();
        },
        onTransmuxerLog: onTransmuxerLog,
        onDone: function onDone(result) {
          if (!doneFn) {
            return;
          }

          result.type = result.type === 'combined' ? 'video' : result.type;
          doneFn(null, segment, result);
        }
      });
    }; // In the transmuxer, we don't yet have the ability to extract a "proper" start time.
    // Meaning cached frame data may corrupt our notion of where this segment
    // really starts. To get around this, probe for the info needed.


    workerCallback({
      action: 'probeTs',
      transmuxer: segment.transmuxer,
      data: bytes,
      baseStartTime: segment.baseStartTime,
      callback: function callback(data) {
        // the worker transferred the bytes back; reattach them
        segment.bytes = bytes = data.data;
        var probeResult = data.result;

        if (probeResult) {
          trackInfoFn(segment, {
            hasAudio: probeResult.hasAudio,
            hasVideo: probeResult.hasVideo,
            isMuxed: isMuxed
          });
          trackInfoFn = null;

          if (probeResult.hasAudio && !isMuxed) {
            audioStartFn(probeResult.audioStart);
          }

          if (probeResult.hasVideo) {
            videoStartFn(probeResult.videoStart);
          }

          // probe-derived start times win; suppress transmuxer start events
          audioStartFn = null;
          videoStartFn = null;
        }

        finish();
      }
    });
  };
19385
  /**
   * Routes downloaded (and, if necessary, already decrypted) segment bytes to
   * the correct processing path: fmp4 start-time probing, a direct hand-off
   * for segments that need no transmuxing (e.g. VTT), or TS/AAC transmuxing.
   *
   * @param {Object} _ref5
   * @param {Object} _ref5.segment - simplified segmentInfo object; mutated here
   *        (isFmp4, container, bytes)
   * @param {ArrayBuffer|Uint8Array} _ref5.bytes - the raw segment bytes
   * @param {Function} _ref5.trackInfoFn - receives track info
   * @param {Function} _ref5.timingInfoFn - receives timing info
   * @param {Function} _ref5.videoSegmentTimingInfoFn - receives video timing info
   * @param {Function} _ref5.audioSegmentTimingInfoFn - receives audio timing info
   * @param {Function} _ref5.id3Fn - receives ID3 metadata
   * @param {Function} _ref5.captionsFn - receives captions
   * @param {boolean} _ref5.isEndOfTimeline - true if this is the timeline's last segment
   * @param {Function} _ref5.endedTimelineFn - called when a timeline ends
   * @param {Function} _ref5.dataFn - receives usable segment bytes
   * @param {Function} _ref5.doneFn - called once processing finishes (or is skipped)
   * @param {Function} _ref5.onTransmuxerLog - receives transmuxer log messages
   */
  var handleSegmentBytes = function handleSegmentBytes(_ref5) {
    var segment = _ref5.segment,
        bytes = _ref5.bytes,
        trackInfoFn = _ref5.trackInfoFn,
        timingInfoFn = _ref5.timingInfoFn,
        videoSegmentTimingInfoFn = _ref5.videoSegmentTimingInfoFn,
        audioSegmentTimingInfoFn = _ref5.audioSegmentTimingInfoFn,
        id3Fn = _ref5.id3Fn,
        captionsFn = _ref5.captionsFn,
        isEndOfTimeline = _ref5.isEndOfTimeline,
        endedTimelineFn = _ref5.endedTimelineFn,
        dataFn = _ref5.dataFn,
        doneFn = _ref5.doneFn,
        onTransmuxerLog = _ref5.onTransmuxerLog;
    var bytesAsUint8Array = new Uint8Array(bytes); // TODO:
    // We should have a handler that fetches the number of bytes required
    // to check if something is fmp4. This will allow us to save bandwidth
    // because we can only blacklist a playlist and abort requests
    // by codec after trackinfo triggers.

    if (isLikelyFmp4MediaSegment(bytesAsUint8Array)) {
      segment.isFmp4 = true;
      var tracks = segment.map.tracks;
      var trackInfo = {
        isFmp4: true,
        hasVideo: !!tracks.video,
        hasAudio: !!tracks.audio
      }; // if we have a audio track, with a codec that is not set to
      // encrypted audio

      if (tracks.audio && tracks.audio.codec && tracks.audio.codec !== 'enca') {
        trackInfo.audioCodec = tracks.audio.codec;
      } // if we have a video track, with a codec that is not set to
      // encrypted video


      if (tracks.video && tracks.video.codec && tracks.video.codec !== 'encv') {
        trackInfo.videoCodec = tracks.video.codec;
      }

      if (tracks.video && tracks.audio) {
        trackInfo.isMuxed = true;
      } // since we don't support appending fmp4 data on progress, we know we have the full
      // segment here


      trackInfoFn(segment, trackInfo); // The probe doesn't provide the segment end time, so only callback with the start
      // time. The end time can be roughly calculated by the receiver using the duration.
      //
      // Note that the start time returned by the probe reflects the baseMediaDecodeTime, as
      // that is the true start of the segment (where the playback engine should begin
      // decoding).

      var finishLoading = function finishLoading(captions) {
        // if the track still has audio at this point it is only possible
        // for it to be audio only. See `tracks.video && tracks.audio` if statement
        // above.
        // we make sure to use segment.bytes here as that
        dataFn(segment, {
          data: bytesAsUint8Array,
          type: trackInfo.hasAudio && !trackInfo.isMuxed ? 'audio' : 'video'
        });

        if (captions && captions.length) {
          captionsFn(segment, captions);
        }

        doneFn(null, segment, {});
      };

      workerCallback({
        action: 'probeMp4StartTime',
        timescales: segment.map.timescales,
        data: bytesAsUint8Array,
        transmuxer: segment.transmuxer,
        callback: function callback(_ref6) {
          var data = _ref6.data,
              startTime = _ref6.startTime;
          // transfer bytes back to us
          // (the worker transferred the buffer away; re-adopt it here so both
          // `bytes` and `segment.bytes` stay valid for later use)
          bytes = data.buffer;
          segment.bytes = bytesAsUint8Array = data;

          if (trackInfo.hasAudio && !trackInfo.isMuxed) {
            timingInfoFn(segment, 'audio', 'start', startTime);
          }

          if (trackInfo.hasVideo) {
            timingInfoFn(segment, 'video', 'start', startTime);
          } // Run through the CaptionParser in case there are captions.
          // Initialize CaptionParser if it hasn't been yet


          if (!tracks.video || !data.byteLength || !segment.transmuxer) {
            finishLoading();
            return;
          }

          workerCallback({
            action: 'pushMp4Captions',
            endAction: 'mp4Captions',
            transmuxer: segment.transmuxer,
            data: bytesAsUint8Array,
            timescales: segment.map.timescales,
            trackIds: [tracks.video.id],
            callback: function callback(message) {
              // transfer bytes back to us
              bytes = message.data.buffer;
              segment.bytes = bytesAsUint8Array = message.data;
              message.logs.forEach(function (log) {
                onTransmuxerLog(videojs__default["default"].mergeOptions(log, {
                  stream: 'mp4CaptionParser'
                }));
              });
              finishLoading(message.captions);
            }
          });
        }
      });
      return;
    } // VTT or other segments that don't need processing


    if (!segment.transmuxer) {
      doneFn(null, segment, {});
      return;
    }

    // lazily sniff the container type once and cache it on the segment
    if (typeof segment.container === 'undefined') {
      segment.container = detectContainerForBytes(bytesAsUint8Array);
    }

    // unknown containers are reported as having no tracks and finished early
    if (segment.container !== 'ts' && segment.container !== 'aac') {
      trackInfoFn(segment, {
        hasAudio: false,
        hasVideo: false
      });
      doneFn(null, segment, {});
      return;
    } // ts or aac


    transmuxAndNotify({
      segment: segment,
      bytes: bytes,
      trackInfoFn: trackInfoFn,
      timingInfoFn: timingInfoFn,
      videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
      audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
      id3Fn: id3Fn,
      captionsFn: captionsFn,
      isEndOfTimeline: isEndOfTimeline,
      endedTimelineFn: endedTimelineFn,
      dataFn: dataFn,
      doneFn: doneFn,
      onTransmuxerLog: onTransmuxerLog
    });
  };
19543
  /**
   * Sends an encrypted payload to the AES-128 decryption web worker and calls
   * `callback` with the decrypted bytes once the worker responds.
   *
   * @param {Object} _ref7
   * @param {string|number} _ref7.id - correlation id; the worker echoes it back
   *        as `event.data.source` so concurrent decrypts can share one worker
   * @param {Object} _ref7.key - decryption key; `bytes` holds the key material
   *        and `iv` the initialization vector
   * @param {Uint8Array} _ref7.encryptedBytes - the bytes to decrypt
   * @param {WebWorker} _ref7.decryptionWorker - worker hosting the AES-128
   *        decryption routines
   * @param {Function} callback - receives a Uint8Array of decrypted bytes
   */
  var decrypt = function decrypt(_ref7, callback) {
    var id = _ref7.id,
        key = _ref7.key,
        encryptedBytes = _ref7.encryptedBytes,
        decryptionWorker = _ref7.decryptionWorker;

    // One-shot listener: only react to the message matching our id, then
    // detach so other in-flight decrypt() calls on the same worker are
    // unaffected.
    var decryptionHandler = function decryptionHandler(event) {
      if (event.data.source === id) {
        decryptionWorker.removeEventListener('message', decryptionHandler);
        var decrypted = event.data.decrypted;
        callback(new Uint8Array(decrypted.bytes, decrypted.byteOffset, decrypted.byteLength));
      }
    };

    // listener must be attached before posting, or a fast worker reply could
    // be missed
    decryptionWorker.addEventListener('message', decryptionHandler);
    var keyBytes;

    // Copy the key material: its buffer is listed as a transferable below, so
    // posting would otherwise detach the caller's original key bytes. Typed
    // arrays provide slice(); otherwise build a Uint32Array from the array-like.
    if (key.bytes.slice) {
      keyBytes = key.bytes.slice();
    } else {
      keyBytes = new Uint32Array(Array.prototype.slice.call(key.bytes));
    } // hand the payload to the worker; both buffers are transferred (not
    // copied), so `encryptedBytes` is no longer usable by the caller


    decryptionWorker.postMessage(createTransferableMessage({
      source: id,
      encrypted: encryptedBytes,
      key: keyBytes,
      iv: key.iv
    }), [encryptedBytes.buffer, keyBytes.buffer]);
  };
19575 /**
19576 * Decrypt the segment via the decryption web worker
19577 *
19578 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
19579 * routines
19580 * @param {Object} segment - a simplified copy of the segmentInfo object
19581 * from SegmentLoader
19582 * @param {Function} trackInfoFn - a callback that receives track info
19583 * @param {Function} timingInfoFn - a callback that receives timing info
19584 * @param {Function} videoSegmentTimingInfoFn
19585 * a callback that receives video timing info based on media times and
19586 * any adjustments made by the transmuxer
19587 * @param {Function} audioSegmentTimingInfoFn
19588 * a callback that receives audio timing info based on media times and
19589 * any adjustments made by the transmuxer
19590 * @param {boolean} isEndOfTimeline
19591 * true if this segment represents the last segment in a timeline
19592 * @param {Function} endedTimelineFn
19593 * a callback made when a timeline is ended, will only be called if
19594 * isEndOfTimeline is true
19595 * @param {Function} dataFn - a callback that is executed when segment bytes are available
19596 * and ready to use
19597 * @param {Function} doneFn - a callback that is executed after decryption has completed
19598 */
19599
19600
19601 var decryptSegment = function decryptSegment(_ref8) {
19602 var decryptionWorker = _ref8.decryptionWorker,
19603 segment = _ref8.segment,
19604 trackInfoFn = _ref8.trackInfoFn,
19605 timingInfoFn = _ref8.timingInfoFn,
19606 videoSegmentTimingInfoFn = _ref8.videoSegmentTimingInfoFn,
19607 audioSegmentTimingInfoFn = _ref8.audioSegmentTimingInfoFn,
19608 id3Fn = _ref8.id3Fn,
19609 captionsFn = _ref8.captionsFn,
19610 isEndOfTimeline = _ref8.isEndOfTimeline,
19611 endedTimelineFn = _ref8.endedTimelineFn,
19612 dataFn = _ref8.dataFn,
19613 doneFn = _ref8.doneFn,
19614 onTransmuxerLog = _ref8.onTransmuxerLog;
19615 decrypt({
19616 id: segment.requestId,
19617 key: segment.key,
19618 encryptedBytes: segment.encryptedBytes,
19619 decryptionWorker: decryptionWorker
19620 }, function (decryptedBytes) {
19621 segment.bytes = decryptedBytes;
19622 handleSegmentBytes({
19623 segment: segment,
19624 bytes: segment.bytes,
19625 trackInfoFn: trackInfoFn,
19626 timingInfoFn: timingInfoFn,
19627 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
19628 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
19629 id3Fn: id3Fn,
19630 captionsFn: captionsFn,
19631 isEndOfTimeline: isEndOfTimeline,
19632 endedTimelineFn: endedTimelineFn,
19633 dataFn: dataFn,
19634 doneFn: doneFn,
19635 onTransmuxerLog: onTransmuxerLog
19636 });
19637 });
19638 };
19639 /**
19640 * This function waits for all XHRs to finish (with either success or failure)
   * before continuing processing via its callback. The function gathers errors
19642 * from each request into a single errors array so that the error status for
19643 * each request can be examined later.
19644 *
19645 * @param {Object} activeXhrs - an object that tracks all XHR requests
19646 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
19647 * routines
19648 * @param {Function} trackInfoFn - a callback that receives track info
19649 * @param {Function} timingInfoFn - a callback that receives timing info
19650 * @param {Function} videoSegmentTimingInfoFn
19651 * a callback that receives video timing info based on media times and
19652 * any adjustments made by the transmuxer
19653 * @param {Function} audioSegmentTimingInfoFn
19654 * a callback that receives audio timing info based on media times and
19655 * any adjustments made by the transmuxer
19656 * @param {Function} id3Fn - a callback that receives ID3 metadata
19657 * @param {Function} captionsFn - a callback that receives captions
19658 * @param {boolean} isEndOfTimeline
19659 * true if this segment represents the last segment in a timeline
19660 * @param {Function} endedTimelineFn
19661 * a callback made when a timeline is ended, will only be called if
19662 * isEndOfTimeline is true
19663 * @param {Function} dataFn - a callback that is executed when segment bytes are available
19664 * and ready to use
19665 * @param {Function} doneFn - a callback that is executed after all resources have been
19666 * downloaded and any decryption completed
19667 */
19668
19669
  var waitForCompletion = function waitForCompletion(_ref9) {
    var activeXhrs = _ref9.activeXhrs,
        decryptionWorker = _ref9.decryptionWorker,
        trackInfoFn = _ref9.trackInfoFn,
        timingInfoFn = _ref9.timingInfoFn,
        videoSegmentTimingInfoFn = _ref9.videoSegmentTimingInfoFn,
        audioSegmentTimingInfoFn = _ref9.audioSegmentTimingInfoFn,
        id3Fn = _ref9.id3Fn,
        captionsFn = _ref9.captionsFn,
        isEndOfTimeline = _ref9.isEndOfTimeline,
        endedTimelineFn = _ref9.endedTimelineFn,
        dataFn = _ref9.dataFn,
        doneFn = _ref9.doneFn,
        onTransmuxerLog = _ref9.onTransmuxerLog;
    // closure state shared by every request in the batch: how many have
    // finished, and whether we already failed the whole group
    var count = 0;
    var didError = false;
    // the returned function is invoked once per completed request
    return function (error, segment) {
      // after the first error, all later callbacks for this batch are no-ops
      if (didError) {
        return;
      }

      if (error) {
        didError = true; // If there are errors, we have to abort any outstanding requests

        abortAll(activeXhrs); // Even though the requests above are aborted, and in theory we could wait until we
        // handle the aborted events from those requests, there are some cases where we may
        // never get an aborted event. For instance, if the network connection is lost and
        // there were two requests, the first may have triggered an error immediately, while
        // the second request remains unsent. In that case, the aborted algorithm will not
        // trigger an abort: see https://xhr.spec.whatwg.org/#the-abort()-method
        //
        // We also can't rely on the ready state of the XHR, since the request that
        // triggered the connection error may also show as a ready state of 0 (unsent).
        // Therefore, we have to finish this group of requests immediately after the first
        // seen error.

        return doneFn(error, segment);
      }

      count += 1;

      // only proceed once every request in the batch has completed
      if (count === activeXhrs.length) {
        // finish processing: decrypt the media segment if needed, otherwise
        // hand the bytes straight to the segment-handling pipeline
        var segmentFinish = function segmentFinish() {
          if (segment.encryptedBytes) {
            return decryptSegment({
              decryptionWorker: decryptionWorker,
              segment: segment,
              trackInfoFn: trackInfoFn,
              timingInfoFn: timingInfoFn,
              videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
              audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
              id3Fn: id3Fn,
              captionsFn: captionsFn,
              isEndOfTimeline: isEndOfTimeline,
              endedTimelineFn: endedTimelineFn,
              dataFn: dataFn,
              doneFn: doneFn,
              onTransmuxerLog: onTransmuxerLog
            });
          } // Otherwise, everything is ready just continue


          handleSegmentBytes({
            segment: segment,
            bytes: segment.bytes,
            trackInfoFn: trackInfoFn,
            timingInfoFn: timingInfoFn,
            videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
            audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
            id3Fn: id3Fn,
            captionsFn: captionsFn,
            isEndOfTimeline: isEndOfTimeline,
            endedTimelineFn: endedTimelineFn,
            dataFn: dataFn,
            doneFn: doneFn,
            onTransmuxerLog: onTransmuxerLog
          });
        }; // Keep track of when *all* of the requests have completed


        segment.endOfAllRequests = Date.now();

        // an encrypted init segment must be decrypted and parsed before the
        // media segment itself can be processed
        if (segment.map && segment.map.encryptedBytes && !segment.map.bytes) {
          return decrypt({
            decryptionWorker: decryptionWorker,
            // add -init to the "id" to differentiate between segment
            // and init segment decryption, just in case they happen
            // at the same time at some point in the future.
            id: segment.requestId + '-init',
            encryptedBytes: segment.map.encryptedBytes,
            key: segment.map.key
          }, function (decryptedBytes) {
            segment.map.bytes = decryptedBytes;
            parseInitSegment(segment, function (parseError) {
              if (parseError) {
                abortAll(activeXhrs);
                return doneFn(parseError, segment);
              }

              segmentFinish();
            });
          });
        }

        segmentFinish();
      }
    };
  };
19778 /**
19779 * Calls the abort callback if any request within the batch was aborted. Will only call
19780 * the callback once per batch of requests, even if multiple were aborted.
19781 *
19782 * @param {Object} loadendState - state to check to see if the abort function was called
19783 * @param {Function} abortFn - callback to call for abort
19784 */
19785
19786
19787 var handleLoadEnd = function handleLoadEnd(_ref10) {
19788 var loadendState = _ref10.loadendState,
19789 abortFn = _ref10.abortFn;
19790 return function (event) {
19791 var request = event.target;
19792
19793 if (request.aborted && abortFn && !loadendState.calledAbortFn) {
19794 abortFn();
19795 loadendState.calledAbortFn = true;
19796 }
19797 };
19798 };
19799 /**
19800 * Simple progress event callback handler that gathers some stats before
19801 * executing a provided callback with the `segment` object
19802 *
19803 * @param {Object} segment - a simplified copy of the segmentInfo object
19804 * from SegmentLoader
19805 * @param {Function} progressFn - a callback that is executed each time a progress event
19806 * is received
19807 * @param {Function} trackInfoFn - a callback that receives track info
19808 * @param {Function} timingInfoFn - a callback that receives timing info
19809 * @param {Function} videoSegmentTimingInfoFn
19810 * a callback that receives video timing info based on media times and
19811 * any adjustments made by the transmuxer
19812 * @param {Function} audioSegmentTimingInfoFn
19813 * a callback that receives audio timing info based on media times and
19814 * any adjustments made by the transmuxer
19815 * @param {boolean} isEndOfTimeline
19816 * true if this segment represents the last segment in a timeline
19817 * @param {Function} endedTimelineFn
19818 * a callback made when a timeline is ended, will only be called if
19819 * isEndOfTimeline is true
19820 * @param {Function} dataFn - a callback that is executed when segment bytes are available
19821 * and ready to use
19822 * @param {Event} event - the progress event object from XMLHttpRequest
19823 */
19824
19825
19826 var handleProgress = function handleProgress(_ref11) {
19827 var segment = _ref11.segment,
19828 progressFn = _ref11.progressFn;
19829 _ref11.trackInfoFn;
19830 _ref11.timingInfoFn;
19831 _ref11.videoSegmentTimingInfoFn;
19832 _ref11.audioSegmentTimingInfoFn;
19833 _ref11.id3Fn;
19834 _ref11.captionsFn;
19835 _ref11.isEndOfTimeline;
19836 _ref11.endedTimelineFn;
19837 _ref11.dataFn;
19838 return function (event) {
19839 var request = event.target;
19840
19841 if (request.aborted) {
19842 return;
19843 }
19844
19845 segment.stats = videojs__default["default"].mergeOptions(segment.stats, getProgressStats(event)); // record the time that we receive the first byte of data
19846
19847 if (!segment.stats.firstBytesReceivedAt && segment.stats.bytesReceived) {
19848 segment.stats.firstBytesReceivedAt = Date.now();
19849 }
19850
19851 return progressFn(event, segment);
19852 };
19853 };
19854 /**
19855 * Load all resources and does any processing necessary for a media-segment
19856 *
19857 * Features:
19858 * decrypts the media-segment if it has a key uri and an iv
19859 * aborts *all* requests if *any* one request fails
19860 *
19861 * The segment object, at minimum, has the following format:
19862 * {
19863 * resolvedUri: String,
19864 * [transmuxer]: Object,
19865 * [byterange]: {
19866 * offset: Number,
19867 * length: Number
19868 * },
19869 * [key]: {
19870 * resolvedUri: String
19871 * [byterange]: {
19872 * offset: Number,
19873 * length: Number
19874 * },
19875 * iv: {
19876 * bytes: Uint32Array
19877 * }
19878 * },
19879 * [map]: {
19880 * resolvedUri: String,
19881 * [byterange]: {
19882 * offset: Number,
19883 * length: Number
19884 * },
19885 * [bytes]: Uint8Array
19886 * }
19887 * }
19888 * ...where [name] denotes optional properties
19889 *
19890 * @param {Function} xhr - an instance of the xhr wrapper in xhr.js
19891 * @param {Object} xhrOptions - the base options to provide to all xhr requests
19892 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128
19893 * decryption routines
19894 * @param {Object} segment - a simplified copy of the segmentInfo object
19895 * from SegmentLoader
19896 * @param {Function} abortFn - a callback called (only once) if any piece of a request was
19897 * aborted
19898 * @param {Function} progressFn - a callback that receives progress events from the main
19899 * segment's xhr request
19900 * @param {Function} trackInfoFn - a callback that receives track info
19901 * @param {Function} timingInfoFn - a callback that receives timing info
19902 * @param {Function} videoSegmentTimingInfoFn
19903 * a callback that receives video timing info based on media times and
19904 * any adjustments made by the transmuxer
19905 * @param {Function} audioSegmentTimingInfoFn
19906 * a callback that receives audio timing info based on media times and
19907 * any adjustments made by the transmuxer
19908 * @param {Function} id3Fn - a callback that receives ID3 metadata
19909 * @param {Function} captionsFn - a callback that receives captions
19910 * @param {boolean} isEndOfTimeline
19911 * true if this segment represents the last segment in a timeline
19912 * @param {Function} endedTimelineFn
19913 * a callback made when a timeline is ended, will only be called if
19914 * isEndOfTimeline is true
19915 * @param {Function} dataFn - a callback that receives data from the main segment's xhr
19916 * request, transmuxed if needed
19917 * @param {Function} doneFn - a callback that is executed only once all requests have
19918 * succeeded or failed
19919 * @return {Function} a function that, when invoked, immediately aborts all
19920 * outstanding requests
19921 */
19922
19923
  var mediaSegmentRequest = function mediaSegmentRequest(_ref12) {
    var xhr = _ref12.xhr,
        xhrOptions = _ref12.xhrOptions,
        decryptionWorker = _ref12.decryptionWorker,
        segment = _ref12.segment,
        abortFn = _ref12.abortFn,
        progressFn = _ref12.progressFn,
        trackInfoFn = _ref12.trackInfoFn,
        timingInfoFn = _ref12.timingInfoFn,
        videoSegmentTimingInfoFn = _ref12.videoSegmentTimingInfoFn,
        audioSegmentTimingInfoFn = _ref12.audioSegmentTimingInfoFn,
        id3Fn = _ref12.id3Fn,
        captionsFn = _ref12.captionsFn,
        isEndOfTimeline = _ref12.isEndOfTimeline,
        endedTimelineFn = _ref12.endedTimelineFn,
        dataFn = _ref12.dataFn,
        doneFn = _ref12.doneFn,
        onTransmuxerLog = _ref12.onTransmuxerLog;
    // all requests issued for this segment (key, map key, init segment, media
    // segment); the returned abort function cancels every one of them
    var activeXhrs = [];
    // shared completion callback: fires doneFn only after every request in
    // activeXhrs has succeeded, or immediately on the first failure
    var finishProcessingFn = waitForCompletion({
      activeXhrs: activeXhrs,
      decryptionWorker: decryptionWorker,
      trackInfoFn: trackInfoFn,
      timingInfoFn: timingInfoFn,
      videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
      audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
      id3Fn: id3Fn,
      captionsFn: captionsFn,
      isEndOfTimeline: isEndOfTimeline,
      endedTimelineFn: endedTimelineFn,
      dataFn: dataFn,
      doneFn: doneFn,
      onTransmuxerLog: onTransmuxerLog
    }); // optionally, request the decryption key

    if (segment.key && !segment.key.bytes) {
      var objects = [segment.key];

      // when the init segment shares the same key URI, one request can
      // populate both key objects
      if (segment.map && !segment.map.bytes && segment.map.key && segment.map.key.resolvedUri === segment.key.resolvedUri) {
        objects.push(segment.map.key);
      }

      var keyRequestOptions = videojs__default["default"].mergeOptions(xhrOptions, {
        uri: segment.key.resolvedUri,
        responseType: 'arraybuffer'
      });
      var keyRequestCallback = handleKeyResponse(segment, objects, finishProcessingFn);
      var keyXhr = xhr(keyRequestOptions, keyRequestCallback);
      activeXhrs.push(keyXhr);
    } // optionally, request the associated media init segment


    if (segment.map && !segment.map.bytes) {
      // the init segment may use its own key, distinct from the media
      // segment's, which then needs its own request
      var differentMapKey = segment.map.key && (!segment.key || segment.key.resolvedUri !== segment.map.key.resolvedUri);

      if (differentMapKey) {
        var mapKeyRequestOptions = videojs__default["default"].mergeOptions(xhrOptions, {
          uri: segment.map.key.resolvedUri,
          responseType: 'arraybuffer'
        });
        var mapKeyRequestCallback = handleKeyResponse(segment, [segment.map.key], finishProcessingFn);
        var mapKeyXhr = xhr(mapKeyRequestOptions, mapKeyRequestCallback);
        activeXhrs.push(mapKeyXhr);
      }

      var initSegmentOptions = videojs__default["default"].mergeOptions(xhrOptions, {
        uri: segment.map.resolvedUri,
        responseType: 'arraybuffer',
        headers: segmentXhrHeaders(segment.map)
      });
      var initSegmentRequestCallback = handleInitSegmentResponse({
        segment: segment,
        finishProcessingFn: finishProcessingFn
      });
      var initSegmentXhr = xhr(initSegmentOptions, initSegmentRequestCallback);
      activeXhrs.push(initSegmentXhr);
    }

    // the media segment itself; for low-latency HLS a part-level URI takes
    // precedence over the full-segment URI
    var segmentRequestOptions = videojs__default["default"].mergeOptions(xhrOptions, {
      uri: segment.part && segment.part.resolvedUri || segment.resolvedUri,
      responseType: 'arraybuffer',
      headers: segmentXhrHeaders(segment)
    });
    var segmentRequestCallback = handleSegmentResponse({
      segment: segment,
      finishProcessingFn: finishProcessingFn,
      responseType: segmentRequestOptions.responseType
    });
    var segmentXhr = xhr(segmentRequestOptions, segmentRequestCallback);
    segmentXhr.addEventListener('progress', handleProgress({
      segment: segment,
      progressFn: progressFn,
      trackInfoFn: trackInfoFn,
      timingInfoFn: timingInfoFn,
      videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
      audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
      id3Fn: id3Fn,
      captionsFn: captionsFn,
      isEndOfTimeline: isEndOfTimeline,
      endedTimelineFn: endedTimelineFn,
      dataFn: dataFn
    }));
    activeXhrs.push(segmentXhr); // since all parts of the request must be considered, but should not make callbacks
    // multiple times, provide a shared state object

    var loadendState = {};
    activeXhrs.forEach(function (activeXhr) {
      activeXhr.addEventListener('loadend', handleLoadEnd({
        loadendState: loadendState,
        abortFn: abortFn
      }));
    });
    // caller-facing abort: cancels every request issued for this segment
    return function () {
      return abortAll(activeXhrs);
    };
  };
20040
  /**
   * @file - codecs.js - Handles tasks regarding codec strings, such as
   * translating legacy codec strings, or parsing codec strings into objects
   * that can be examined.
   */
  // section-scoped logger for codec-utility diagnostics
  var logFn$1 = logger('CodecUtils');
20046 /**
20047 * Returns a set of codec strings parsed from the playlist or the default
20048 * codec strings if no codecs were specified in the playlist
20049 *
20050 * @param {Playlist} media the current media playlist
20051 * @return {Object} an object with the video and audio codecs
20052 */
20053
20054 var getCodecs = function getCodecs(media) {
20055 // if the codecs were explicitly specified, use them instead of the
20056 // defaults
20057 var mediaAttributes = media.attributes || {};
20058
20059 if (mediaAttributes.CODECS) {
20060 return parseCodecs(mediaAttributes.CODECS);
20061 }
20062 };
20063
20064 var isMaat = function isMaat(master, media) {
20065 var mediaAttributes = media.attributes || {};
20066 return master && master.mediaGroups && master.mediaGroups.AUDIO && mediaAttributes.AUDIO && master.mediaGroups.AUDIO[mediaAttributes.AUDIO];
20067 };
20068 var isMuxed = function isMuxed(master, media) {
20069 if (!isMaat(master, media)) {
20070 return true;
20071 }
20072
20073 var mediaAttributes = media.attributes || {};
20074 var audioGroup = master.mediaGroups.AUDIO[mediaAttributes.AUDIO];
20075
20076 for (var groupId in audioGroup) {
20077 // If an audio group has a URI (the case for HLS, as HLS will use external playlists),
20078 // or there are listed playlists (the case for DASH, as the manifest will have already
20079 // provided all of the details necessary to generate the audio playlist, as opposed to
20080 // HLS' externally requested playlists), then the content is demuxed.
20081 if (!audioGroup[groupId].uri && !audioGroup[groupId].playlists) {
20082 return true;
20083 }
20084 }
20085
20086 return false;
20087 };
20088 var unwrapCodecList = function unwrapCodecList(codecList) {
20089 var codecs = {};
20090 codecList.forEach(function (_ref) {
20091 var mediaType = _ref.mediaType,
20092 type = _ref.type,
20093 details = _ref.details;
20094 codecs[mediaType] = codecs[mediaType] || [];
20095 codecs[mediaType].push(translateLegacyCodec("" + type + details));
20096 });
20097 Object.keys(codecs).forEach(function (mediaType) {
20098 if (codecs[mediaType].length > 1) {
20099 logFn$1("multiple " + mediaType + " codecs found as attributes: " + codecs[mediaType].join(', ') + ". Setting playlist codecs to null so that we wait for mux.js to probe segments for real codecs.");
20100 codecs[mediaType] = null;
20101 return;
20102 }
20103
20104 codecs[mediaType] = codecs[mediaType][0];
20105 });
20106 return codecs;
20107 };
20108 var codecCount = function codecCount(codecObj) {
20109 var count = 0;
20110
20111 if (codecObj.audio) {
20112 count++;
20113 }
20114
20115 if (codecObj.video) {
20116 count++;
20117 }
20118
20119 return count;
20120 };
20121 /**
20122 * Calculates the codec strings for a working configuration of
20123 * SourceBuffers to play variant streams in a master playlist. If
20124 * there is no possible working configuration, an empty object will be
20125 * returned.
20126 *
20127 * @param master {Object} the m3u8 object for the master playlist
20128 * @param media {Object} the m3u8 object for the variant playlist
20129 * @return {Object} the codec strings.
20130 *
20131 * @private
20132 */
20133
20134 var codecsForPlaylist = function codecsForPlaylist(master, media) {
20135 var mediaAttributes = media.attributes || {};
20136 var codecInfo = unwrapCodecList(getCodecs(media) || []); // HLS with multiple-audio tracks must always get an audio codec.
20137 // Put another way, there is no way to have a video-only multiple-audio HLS!
20138
20139 if (isMaat(master, media) && !codecInfo.audio) {
20140 if (!isMuxed(master, media)) {
20141 // It is possible for codecs to be specified on the audio media group playlist but
20142 // not on the rendition playlist. This is mostly the case for DASH, where audio and
20143 // video are always separate (and separately specified).
20144 var defaultCodecs = unwrapCodecList(codecsFromDefault(master, mediaAttributes.AUDIO) || []);
20145
20146 if (defaultCodecs.audio) {
20147 codecInfo.audio = defaultCodecs.audio;
20148 }
20149 }
20150 }
20151
20152 return codecInfo;
20153 };
20154
  // section-scoped logger for playlist-selector diagnostics
  var logFn = logger('PlaylistSelector');
20156
20157 var representationToString = function representationToString(representation) {
20158 if (!representation || !representation.playlist) {
20159 return;
20160 }
20161
20162 var playlist = representation.playlist;
20163 return JSON.stringify({
20164 id: playlist.id,
20165 bandwidth: representation.bandwidth,
20166 width: representation.width,
20167 height: representation.height,
20168 codecs: playlist.attributes && playlist.attributes.CODECS || ''
20169 });
20170 }; // Utilities
20171
20172 /**
20173 * Returns the CSS value for the specified property on an element
20174 * using `getComputedStyle`. Firefox has a long-standing issue where
20175 * getComputedStyle() may return null when running in an iframe with
20176 * `display: none`.
20177 *
20178 * @see https://bugzilla.mozilla.org/show_bug.cgi?id=548397
20179 * @param {HTMLElement} el the htmlelement to work on
   * @param {string} property the property to get the style for
20181 */
20182
20183
20184 var safeGetComputedStyle = function safeGetComputedStyle(el, property) {
20185 if (!el) {
20186 return '';
20187 }
20188
20189 var result = window.getComputedStyle(el);
20190
20191 if (!result) {
20192 return '';
20193 }
20194
20195 return result[property];
20196 };
20197 /**
   * Reusable stable sort function
20199 *
20200 * @param {Playlists} array
20201 * @param {Function} sortFn Different comparators
20202 * @function stableSort
20203 */
20204
20205
20206 var stableSort = function stableSort(array, sortFn) {
20207 var newArray = array.slice();
20208 array.sort(function (left, right) {
20209 var cmp = sortFn(left, right);
20210
20211 if (cmp === 0) {
20212 return newArray.indexOf(left) - newArray.indexOf(right);
20213 }
20214
20215 return cmp;
20216 });
20217 };
20218 /**
20219 * A comparator function to sort two playlist object by bandwidth.
20220 *
20221 * @param {Object} left a media playlist object
20222 * @param {Object} right a media playlist object
20223 * @return {number} Greater than zero if the bandwidth attribute of
20224 * left is greater than the corresponding attribute of right. Less
20225 * than zero if the bandwidth of right is greater than left and
20226 * exactly zero if the two are equal.
20227 */
20228
20229
20230 var comparePlaylistBandwidth = function comparePlaylistBandwidth(left, right) {
20231 var leftBandwidth;
20232 var rightBandwidth;
20233
20234 if (left.attributes.BANDWIDTH) {
20235 leftBandwidth = left.attributes.BANDWIDTH;
20236 }
20237
20238 leftBandwidth = leftBandwidth || window.Number.MAX_VALUE;
20239
20240 if (right.attributes.BANDWIDTH) {
20241 rightBandwidth = right.attributes.BANDWIDTH;
20242 }
20243
20244 rightBandwidth = rightBandwidth || window.Number.MAX_VALUE;
20245 return leftBandwidth - rightBandwidth;
20246 };
20247 /**
20248 * A comparator function to sort two playlist object by resolution (width).
20249 *
20250 * @param {Object} left a media playlist object
20251 * @param {Object} right a media playlist object
20252 * @return {number} Greater than zero if the resolution.width attribute of
20253 * left is greater than the corresponding attribute of right. Less
20254 * than zero if the resolution.width of right is greater than left and
20255 * exactly zero if the two are equal.
20256 */
20257
20258 var comparePlaylistResolution = function comparePlaylistResolution(left, right) {
20259 var leftWidth;
20260 var rightWidth;
20261
20262 if (left.attributes.RESOLUTION && left.attributes.RESOLUTION.width) {
20263 leftWidth = left.attributes.RESOLUTION.width;
20264 }
20265
20266 leftWidth = leftWidth || window.Number.MAX_VALUE;
20267
20268 if (right.attributes.RESOLUTION && right.attributes.RESOLUTION.width) {
20269 rightWidth = right.attributes.RESOLUTION.width;
20270 }
20271
20272 rightWidth = rightWidth || window.Number.MAX_VALUE; // NOTE - Fallback to bandwidth sort as appropriate in cases where multiple renditions
20273 // have the same media dimensions/ resolution
20274
20275 if (leftWidth === rightWidth && left.attributes.BANDWIDTH && right.attributes.BANDWIDTH) {
20276 return left.attributes.BANDWIDTH - right.attributes.BANDWIDTH;
20277 }
20278
20279 return leftWidth - rightWidth;
20280 };
20281 /**
20282 * Chooses the appropriate media playlist based on bandwidth and player size
20283 *
20284 * @param {Object} master
20285 * Object representation of the master manifest
20286 * @param {number} playerBandwidth
20287 * Current calculated bandwidth of the player
20288 * @param {number} playerWidth
20289 * Current width of the player element (should account for the device pixel ratio)
20290 * @param {number} playerHeight
20291 * Current height of the player element (should account for the device pixel ratio)
20292 * @param {boolean} limitRenditionByPlayerDimensions
20293 * True if the player width and height should be used during the selection, false otherwise
20294 * @param {Object} masterPlaylistController
20295 * the current masterPlaylistController object
20296 * @return {Playlist} the highest bitrate playlist less than the
20297 * currently detected bandwidth, accounting for some amount of
20298 * bandwidth variance
20299 */
20300
  var simpleSelector = function simpleSelector(master, playerBandwidth, playerWidth, playerHeight, limitRenditionByPlayerDimensions, masterPlaylistController) {
    // If we end up getting called before `master` is available, exit early
    if (!master) {
      return;
    }

    // options exists purely so the chosen inputs can be included in the
    // debug logging at the bottom of this function.
    var options = {
      bandwidth: playerBandwidth,
      width: playerWidth,
      height: playerHeight,
      limitRenditionByPlayerDimensions: limitRenditionByPlayerDimensions
    };
    var playlists = master.playlists; // if playlist is audio only, select between currently active audio group playlists.

    if (Playlist.isAudioOnly(master)) {
      playlists = masterPlaylistController.getAudioTrackPlaylists_(); // add audioOnly to options so that we log audioOnly: true
      // at the bottom of this function for debugging.

      options.audioOnly = true;
    } // convert the playlists to an intermediary representation to make comparisons easier


    var sortedPlaylistReps = playlists.map(function (playlist) {
      var bandwidth;
      var width = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.width;
      var height = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height;
      bandwidth = playlist.attributes && playlist.attributes.BANDWIDTH;
      bandwidth = bandwidth || window.Number.MAX_VALUE; // missing BANDWIDTH sorts last
      return {
        bandwidth: bandwidth,
        width: width,
        height: height,
        playlist: playlist
      };
    });
    stableSort(sortedPlaylistReps, function (left, right) {
      return left.bandwidth - right.bandwidth;
    }); // filter out any playlists that have been excluded due to
    // incompatible configurations

    sortedPlaylistReps = sortedPlaylistReps.filter(function (rep) {
      return !Playlist.isIncompatible(rep.playlist);
    }); // filter out any playlists that have been disabled manually through the representations
    // api or blacklisted temporarily due to playback errors.

    var enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
      return Playlist.isEnabled(rep.playlist);
    });

    if (!enabledPlaylistReps.length) {
      // if there are no enabled playlists, then they have all been blacklisted or disabled
      // by the user through the representations api. In this case, ignore blacklisting and
      // fallback to what the user wants by using playlists the user has not disabled.
      enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
        return !Playlist.isDisabled(rep.playlist);
      });
    } // filter out any variant that has greater effective bitrate
    // than the current estimated bandwidth


    var bandwidthPlaylistReps = enabledPlaylistReps.filter(function (rep) {
      return rep.bandwidth * Config.BANDWIDTH_VARIANCE < playerBandwidth;
    });
    var highestRemainingBandwidthRep = bandwidthPlaylistReps[bandwidthPlaylistReps.length - 1]; // get all of the renditions with the same (highest) bandwidth
    // and then taking the very first element
    // (highestRemainingBandwidthRep may be undefined here when the list is
    // empty, but then the filter callback below never runs, so this is safe)

    var bandwidthBestRep = bandwidthPlaylistReps.filter(function (rep) {
      return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
    })[0]; // if we're not going to limit renditions by player size, make an early decision.

    if (limitRenditionByPlayerDimensions === false) {
      var _chosenRep = bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];

      if (_chosenRep && _chosenRep.playlist) {
        var type = 'sortedPlaylistReps';

        if (bandwidthBestRep) {
          type = 'bandwidthBestRep';
        }

        if (enabledPlaylistReps[0]) {
          type = 'enabledPlaylistReps';
        }

        logFn("choosing " + representationToString(_chosenRep) + " using " + type + " with options", options);
        return _chosenRep.playlist;
      }

      logFn('could not choose a playlist with options', options);
      return null;
    } // filter out playlists without resolution information


    var haveResolution = bandwidthPlaylistReps.filter(function (rep) {
      return rep.width && rep.height;
    }); // sort variants by resolution

    stableSort(haveResolution, function (left, right) {
      return left.width - right.width;
    }); // if we have the exact resolution as the player use it

    var resolutionBestRepList = haveResolution.filter(function (rep) {
      return rep.width === playerWidth && rep.height === playerHeight;
    });
    highestRemainingBandwidthRep = resolutionBestRepList[resolutionBestRepList.length - 1]; // ensure that we pick the highest bandwidth variant that have exact resolution

    var resolutionBestRep = resolutionBestRepList.filter(function (rep) {
      return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
    })[0];
    var resolutionPlusOneList;
    var resolutionPlusOneSmallest;
    var resolutionPlusOneRep; // find the smallest variant that is larger than the player
    // if there is no match of exact resolution

    if (!resolutionBestRep) {
      resolutionPlusOneList = haveResolution.filter(function (rep) {
        return rep.width > playerWidth || rep.height > playerHeight;
      }); // find all the variants that have the same smallest resolution

      resolutionPlusOneSmallest = resolutionPlusOneList.filter(function (rep) {
        return rep.width === resolutionPlusOneList[0].width && rep.height === resolutionPlusOneList[0].height;
      }); // ensure that we also pick the highest bandwidth variant that
      // is just-larger-than the video player

      highestRemainingBandwidthRep = resolutionPlusOneSmallest[resolutionPlusOneSmallest.length - 1];
      resolutionPlusOneRep = resolutionPlusOneSmallest.filter(function (rep) {
        return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
      })[0];
    }

    var leastPixelDiffRep; // If this selector proves to be better than others,
    // resolutionPlusOneRep and resolutionBestRep and all
    // the code involving them should be removed.

    if (masterPlaylistController.experimentalLeastPixelDiffSelector) {
      // find the variant that is closest to the player's pixel size
      var leastPixelDiffList = haveResolution.map(function (rep) {
        rep.pixelDiff = Math.abs(rep.width - playerWidth) + Math.abs(rep.height - playerHeight);
        return rep;
      }); // get the highest bandwidth, closest resolution playlist

      stableSort(leastPixelDiffList, function (left, right) {
        // sort by highest bandwidth if pixelDiff is the same
        if (left.pixelDiff === right.pixelDiff) {
          return right.bandwidth - left.bandwidth;
        }

        return left.pixelDiff - right.pixelDiff;
      });
      leastPixelDiffRep = leastPixelDiffList[0];
    } // fallback chain of variants, from most preferred to least


    var chosenRep = leastPixelDiffRep || resolutionPlusOneRep || resolutionBestRep || bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];

    if (chosenRep && chosenRep.playlist) {
      var _type = 'sortedPlaylistReps';

      if (leastPixelDiffRep) {
        _type = 'leastPixelDiffRep';
      } else if (resolutionPlusOneRep) {
        _type = 'resolutionPlusOneRep';
      } else if (resolutionBestRep) {
        _type = 'resolutionBestRep';
      } else if (bandwidthBestRep) {
        _type = 'bandwidthBestRep';
      } else if (enabledPlaylistReps[0]) {
        _type = 'enabledPlaylistReps';
      }

      logFn("choosing " + representationToString(chosenRep) + " using " + _type + " with options", options);
      return chosenRep.playlist;
    }

    logFn('could not choose a playlist with options', options);
    return null;
  };
20478
20479 /**
20480 * Chooses the appropriate media playlist based on the most recent
20481 * bandwidth estimate and the player size.
20482 *
20483 * Expects to be called within the context of an instance of VhsHandler
20484 *
20485 * @return {Playlist} the highest bitrate playlist less than the
20486 * currently detected bandwidth, accounting for some amount of
20487 * bandwidth variance
20488 */
20489
20490 var lastBandwidthSelector = function lastBandwidthSelector() {
20491 var pixelRatio = this.useDevicePixelRatio ? window.devicePixelRatio || 1 : 1;
20492 return simpleSelector(this.playlists.master, this.systemBandwidth, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions, this.masterPlaylistController_);
20493 };
20494 /**
20495 * Chooses the appropriate media playlist based on an
20496 * exponential-weighted moving average of the bandwidth after
20497 * filtering for player size.
20498 *
20499 * Expects to be called within the context of an instance of VhsHandler
20500 *
20501 * @param {number} decay - a number between 0 and 1. Higher values of
20502 * this parameter will cause previous bandwidth estimates to lose
20503 * significance more quickly.
20504 * @return {Function} a function which can be invoked to create a new
20505 * playlist selector function.
20506 * @see https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
20507 */
20508
20509 var movingAverageBandwidthSelector = function movingAverageBandwidthSelector(decay) {
20510 var average = -1;
20511 var lastSystemBandwidth = -1;
20512
20513 if (decay < 0 || decay > 1) {
20514 throw new Error('Moving average bandwidth decay must be between 0 and 1.');
20515 }
20516
20517 return function () {
20518 var pixelRatio = this.useDevicePixelRatio ? window.devicePixelRatio || 1 : 1;
20519
20520 if (average < 0) {
20521 average = this.systemBandwidth;
20522 lastSystemBandwidth = this.systemBandwidth;
20523 } // stop the average value from decaying for every 250ms
20524 // when the systemBandwidth is constant
20525 // and
20526 // stop average from setting to a very low value when the
20527 // systemBandwidth becomes 0 in case of chunk cancellation
20528
20529
20530 if (this.systemBandwidth > 0 && this.systemBandwidth !== lastSystemBandwidth) {
20531 average = decay * this.systemBandwidth + (1 - decay) * average;
20532 lastSystemBandwidth = this.systemBandwidth;
20533 }
20534
20535 return simpleSelector(this.playlists.master, average, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions, this.masterPlaylistController_);
20536 };
20537 };
20538 /**
20539 * Chooses the appropriate media playlist based on the potential to rebuffer
20540 *
20541 * @param {Object} settings
20542 * Object of information required to use this selector
20543 * @param {Object} settings.master
20544 * Object representation of the master manifest
20545 * @param {number} settings.currentTime
20546 * The current time of the player
20547 * @param {number} settings.bandwidth
20548 * Current measured bandwidth
20549 * @param {number} settings.duration
20550 * Duration of the media
20551 * @param {number} settings.segmentDuration
20552 * Segment duration to be used in round trip time calculations
20553 * @param {number} settings.timeUntilRebuffer
20554 * Time left in seconds until the player has to rebuffer
20555 * @param {number} settings.currentTimeline
20556 * The current timeline segments are being loaded from
20557 * @param {SyncController} settings.syncController
20558 * SyncController for determining if we have a sync point for a given playlist
20559 * @return {Object|null}
20560 * {Object} return.playlist
20561 * The highest bandwidth playlist with the least amount of rebuffering
20562 * {Number} return.rebufferingImpact
20563 * The amount of time in seconds switching to this playlist will rebuffer. A
20564 * negative value means that switching will cause zero rebuffering.
20565 */
20566
20567 var minRebufferMaxBandwidthSelector = function minRebufferMaxBandwidthSelector(settings) {
20568 var master = settings.master,
20569 currentTime = settings.currentTime,
20570 bandwidth = settings.bandwidth,
20571 duration = settings.duration,
20572 segmentDuration = settings.segmentDuration,
20573 timeUntilRebuffer = settings.timeUntilRebuffer,
20574 currentTimeline = settings.currentTimeline,
20575 syncController = settings.syncController; // filter out any playlists that have been excluded due to
20576 // incompatible configurations
20577
20578 var compatiblePlaylists = master.playlists.filter(function (playlist) {
20579 return !Playlist.isIncompatible(playlist);
20580 }); // filter out any playlists that have been disabled manually through the representations
20581 // api or blacklisted temporarily due to playback errors.
20582
20583 var enabledPlaylists = compatiblePlaylists.filter(Playlist.isEnabled);
20584
20585 if (!enabledPlaylists.length) {
20586 // if there are no enabled playlists, then they have all been blacklisted or disabled
20587 // by the user through the representations api. In this case, ignore blacklisting and
20588 // fallback to what the user wants by using playlists the user has not disabled.
20589 enabledPlaylists = compatiblePlaylists.filter(function (playlist) {
20590 return !Playlist.isDisabled(playlist);
20591 });
20592 }
20593
20594 var bandwidthPlaylists = enabledPlaylists.filter(Playlist.hasAttribute.bind(null, 'BANDWIDTH'));
20595 var rebufferingEstimates = bandwidthPlaylists.map(function (playlist) {
20596 var syncPoint = syncController.getSyncPoint(playlist, duration, currentTimeline, currentTime); // If there is no sync point for this playlist, switching to it will require a
20597 // sync request first. This will double the request time
20598
20599 var numRequests = syncPoint ? 1 : 2;
20600 var requestTimeEstimate = Playlist.estimateSegmentRequestTime(segmentDuration, bandwidth, playlist);
20601 var rebufferingImpact = requestTimeEstimate * numRequests - timeUntilRebuffer;
20602 return {
20603 playlist: playlist,
20604 rebufferingImpact: rebufferingImpact
20605 };
20606 });
20607 var noRebufferingPlaylists = rebufferingEstimates.filter(function (estimate) {
20608 return estimate.rebufferingImpact <= 0;
20609 }); // Sort by bandwidth DESC
20610
20611 stableSort(noRebufferingPlaylists, function (a, b) {
20612 return comparePlaylistBandwidth(b.playlist, a.playlist);
20613 });
20614
20615 if (noRebufferingPlaylists.length) {
20616 return noRebufferingPlaylists[0];
20617 }
20618
20619 stableSort(rebufferingEstimates, function (a, b) {
20620 return a.rebufferingImpact - b.rebufferingImpact;
20621 });
20622 return rebufferingEstimates[0] || null;
20623 };
20624 /**
20625 * Chooses the appropriate media playlist, which in this case is the lowest bitrate
20626 * one with video. If no renditions with video exist, return the lowest audio rendition.
20627 *
20628 * Expects to be called within the context of an instance of VhsHandler
20629 *
20630 * @return {Object|null}
20631 * {Object} return.playlist
20632 * The lowest bitrate playlist that contains a video codec. If no such rendition
20633 * exists pick the lowest audio rendition.
20634 */
20635
20636 var lowestBitrateCompatibleVariantSelector = function lowestBitrateCompatibleVariantSelector() {
20637 var _this = this;
20638
20639 // filter out any playlists that have been excluded due to
20640 // incompatible configurations or playback errors
20641 var playlists = this.playlists.master.playlists.filter(Playlist.isEnabled); // Sort ascending by bitrate
20642
20643 stableSort(playlists, function (a, b) {
20644 return comparePlaylistBandwidth(a, b);
20645 }); // Parse and assume that playlists with no video codec have no video
20646 // (this is not necessarily true, although it is generally true).
20647 //
20648 // If an entire manifest has no valid videos everything will get filtered
20649 // out.
20650
20651 var playlistsWithVideo = playlists.filter(function (playlist) {
20652 return !!codecsForPlaylist(_this.playlists.master, playlist).video;
20653 });
20654 return playlistsWithVideo[0] || null;
20655 };
20656
20657 /**
20658 * Combine all segments into a single Uint8Array
20659 *
20660 * @param {Object} segmentObj
20661 * @return {Uint8Array} concatenated bytes
20662 * @private
20663 */
20664 var concatSegments = function concatSegments(segmentObj) {
20665 var offset = 0;
20666 var tempBuffer;
20667
20668 if (segmentObj.bytes) {
20669 tempBuffer = new Uint8Array(segmentObj.bytes); // combine the individual segments into one large typed-array
20670
20671 segmentObj.segments.forEach(function (segment) {
20672 tempBuffer.set(segment, offset);
20673 offset += segment.byteLength;
20674 });
20675 }
20676
20677 return tempBuffer;
20678 };
20679
20680 /**
20681 * @file text-tracks.js
20682 */
20683 /**
20684 * Create captions text tracks on video.js if they do not exist
20685 *
20686 * @param {Object} inbandTextTracks a reference to current inbandTextTracks
20687 * @param {Object} tech the video.js tech
20688 * @param {Object} captionStream the caption stream to create
20689 * @private
20690 */
20691
20692 var createCaptionsTrackIfNotExists = function createCaptionsTrackIfNotExists(inbandTextTracks, tech, captionStream) {
20693 if (!inbandTextTracks[captionStream]) {
20694 tech.trigger({
20695 type: 'usage',
20696 name: 'vhs-608'
20697 });
20698 tech.trigger({
20699 type: 'usage',
20700 name: 'hls-608'
20701 });
20702 var instreamId = captionStream; // we need to translate SERVICEn for 708 to how mux.js currently labels them
20703
20704 if (/^cc708_/.test(captionStream)) {
20705 instreamId = 'SERVICE' + captionStream.split('_')[1];
20706 }
20707
20708 var track = tech.textTracks().getTrackById(instreamId);
20709
20710 if (track) {
20711 // Resuse an existing track with a CC# id because this was
20712 // very likely created by videojs-contrib-hls from information
20713 // in the m3u8 for us to use
20714 inbandTextTracks[captionStream] = track;
20715 } else {
20716 // This section gets called when we have caption services that aren't specified in the manifest.
20717 // Manifest level caption services are handled in media-groups.js under CLOSED-CAPTIONS.
20718 var captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};
20719 var label = captionStream;
20720 var language = captionStream;
20721 var def = false;
20722 var captionService = captionServices[instreamId];
20723
20724 if (captionService) {
20725 label = captionService.label;
20726 language = captionService.language;
20727 def = captionService.default;
20728 } // Otherwise, create a track with the default `CC#` label and
20729 // without a language
20730
20731
20732 inbandTextTracks[captionStream] = tech.addRemoteTextTrack({
20733 kind: 'captions',
20734 id: instreamId,
20735 // TODO: investigate why this doesn't seem to turn the caption on by default
20736 default: def,
20737 label: label,
20738 language: language
20739 }, false).track;
20740 }
20741 }
20742 };
20743 /**
20744 * Add caption text track data to a source handler given an array of captions
20745 *
20746 * @param {Object}
20747 * @param {Object} inbandTextTracks the inband text tracks
20748 * @param {number} timestampOffset the timestamp offset of the source buffer
20749 * @param {Array} captionArray an array of caption data
20750 * @private
20751 */
20752
20753 var addCaptionData = function addCaptionData(_ref) {
20754 var inbandTextTracks = _ref.inbandTextTracks,
20755 captionArray = _ref.captionArray,
20756 timestampOffset = _ref.timestampOffset;
20757
20758 if (!captionArray) {
20759 return;
20760 }
20761
20762 var Cue = window.WebKitDataCue || window.VTTCue;
20763 captionArray.forEach(function (caption) {
20764 var track = caption.stream;
20765 inbandTextTracks[track].addCue(new Cue(caption.startTime + timestampOffset, caption.endTime + timestampOffset, caption.text));
20766 });
20767 };
20768 /**
   * Define properties on a cue for backwards compatibility,
   * but warn the user that the way that they are using it
   * is deprecated and will be removed at a later date.
20772 *
20773 * @param {Cue} cue the cue to add the properties on
20774 * @private
20775 */
20776
20777 var deprecateOldCue = function deprecateOldCue(cue) {
20778 Object.defineProperties(cue.frame, {
20779 id: {
20780 get: function get() {
20781 videojs__default["default"].log.warn('cue.frame.id is deprecated. Use cue.value.key instead.');
20782 return cue.value.key;
20783 }
20784 },
20785 value: {
20786 get: function get() {
20787 videojs__default["default"].log.warn('cue.frame.value is deprecated. Use cue.value.data instead.');
20788 return cue.value.data;
20789 }
20790 },
20791 privateData: {
20792 get: function get() {
20793 videojs__default["default"].log.warn('cue.frame.privateData is deprecated. Use cue.value.data instead.');
20794 return cue.value.data;
20795 }
20796 }
20797 });
20798 };
20799 /**
20800 * Add metadata text track data to a source handler given an array of metadata
20801 *
20802 * @param {Object}
20803 * @param {Object} inbandTextTracks the inband text tracks
20804 * @param {Array} metadataArray an array of meta data
20805 * @param {number} timestampOffset the timestamp offset of the source buffer
20806 * @param {number} videoDuration the duration of the video
20807 * @private
20808 */
20809
20810
  var addMetadata = function addMetadata(_ref2) {
    var inbandTextTracks = _ref2.inbandTextTracks,
        metadataArray = _ref2.metadataArray,
        timestampOffset = _ref2.timestampOffset,
        videoDuration = _ref2.videoDuration;

    // nothing to add
    if (!metadataArray) {
      return;
    }

    // Prefer WebKitDataCue where available, otherwise the standard VTTCue.
    var Cue = window.WebKitDataCue || window.VTTCue;
    var metadataTrack = inbandTextTracks.metadataTrack_;

    // without a metadata track there is nowhere to put the cues
    if (!metadataTrack) {
      return;
    }

    metadataArray.forEach(function (metadata) {
      var time = metadata.cueTime + timestampOffset; // if time isn't a finite number between 0 and Infinity, like NaN,
      // ignore this bit of metadata.
      // This likely occurs when you have an non-timed ID3 tag like TIT2,
      // which is the "Title/Songname/Content description" frame

      if (typeof time !== 'number' || window.isNaN(time) || time < 0 || !(time < Infinity)) {
        return;
      }

      // one zero-length cue per ID3 frame; endTimes are fixed up below
      metadata.frames.forEach(function (frame) {
        var cue = new Cue(time, time, frame.value || frame.url || frame.data || '');
        cue.frame = frame;
        cue.value = frame;
        deprecateOldCue(cue); // legacy cue.frame.* getters with warnings
        metadataTrack.addCue(cue);
      });
    });

    if (!metadataTrack.cues || !metadataTrack.cues.length) {
      return;
    } // Updating the metadata cues so that
    // the endTime of each cue is the startTime of the next cue
    // the endTime of last cue is the duration of the video


    var cues = metadataTrack.cues;
    var cuesArray = []; // Create a copy of the TextTrackCueList...
    // ...disregarding cues with a falsey value

    for (var i = 0; i < cues.length; i++) {
      if (cues[i]) {
        cuesArray.push(cues[i]);
      }
    } // Group cues by their startTime value


    var cuesGroupedByStartTime = cuesArray.reduce(function (obj, cue) {
      var timeSlot = obj[cue.startTime] || [];
      timeSlot.push(cue);
      obj[cue.startTime] = timeSlot;
      return obj;
    }, {}); // Sort startTimes by ascending order (numeric, not lexicographic)

    var sortedStartTimes = Object.keys(cuesGroupedByStartTime).sort(function (a, b) {
      return Number(a) - Number(b);
    }); // Map each cue group's endTime to the next group's startTime

    sortedStartTimes.forEach(function (startTime, idx) {
      var cueGroup = cuesGroupedByStartTime[startTime];
      // the last group falls through to the video duration (Number(undefined)
      // is NaN, which is falsy)
      var nextTime = Number(sortedStartTimes[idx + 1]) || videoDuration; // Map each cue's endTime to the next group's startTime

      cueGroup.forEach(function (cue) {
        cue.endTime = nextTime;
      });
    });
  };
20885 /**
20886 * Create metadata text track on video.js if it does not exist
20887 *
20888 * @param {Object} inbandTextTracks a reference to current inbandTextTracks
20889 * @param {string} dispatchType the inband metadata track dispatch type
20890 * @param {Object} tech the video.js tech
20891 * @private
20892 */
20893
20894 var createMetadataTrackIfNotExists = function createMetadataTrackIfNotExists(inbandTextTracks, dispatchType, tech) {
20895 if (inbandTextTracks.metadataTrack_) {
20896 return;
20897 }
20898
20899 inbandTextTracks.metadataTrack_ = tech.addRemoteTextTrack({
20900 kind: 'metadata',
20901 label: 'Timed Metadata'
20902 }, false).track;
20903 inbandTextTracks.metadataTrack_.inBandMetadataTrackDispatchType = dispatchType;
20904 };
20905 /**
20906 * Remove cues from a track on video.js.
20907 *
20908 * @param {Double} start start of where we should remove the cue
20909 * @param {Double} end end of where the we should remove the cue
20910 * @param {Object} track the text track to remove the cues from
20911 * @private
20912 */
20913
20914 var removeCuesFromTrack = function removeCuesFromTrack(start, end, track) {
20915 var i;
20916 var cue;
20917
20918 if (!track) {
20919 return;
20920 }
20921
20922 if (!track.cues) {
20923 return;
20924 }
20925
20926 i = track.cues.length;
20927
20928 while (i--) {
20929 cue = track.cues[i]; // Remove any cue within the provided start and end time
20930
20931 if (cue.startTime >= start && cue.endTime <= end) {
20932 track.removeCue(cue);
20933 }
20934 }
20935 };
20936 /**
20937 * Remove duplicate cues from a track on video.js (a cue is considered a
20938 * duplicate if it has the same time interval and text as another)
20939 *
20940 * @param {Object} track the text track to remove the duplicate cues from
20941 * @private
20942 */
20943
20944 var removeDuplicateCuesFromTrack = function removeDuplicateCuesFromTrack(track) {
20945 var cues = track.cues;
20946
20947 if (!cues) {
20948 return;
20949 }
20950
20951 for (var i = 0; i < cues.length; i++) {
20952 var duplicates = [];
20953 var occurrences = 0;
20954
20955 for (var j = 0; j < cues.length; j++) {
20956 if (cues[i].startTime === cues[j].startTime && cues[i].endTime === cues[j].endTime && cues[i].text === cues[j].text) {
20957 occurrences++;
20958
20959 if (occurrences > 1) {
20960 duplicates.push(cues[j]);
20961 }
20962 }
20963 }
20964
20965 if (duplicates.length) {
20966 duplicates.forEach(function (dupe) {
20967 return track.removeCue(dupe);
20968 });
20969 }
20970 }
20971 };
20972
20973 /**
20974 * mux.js
20975 *
20976 * Copyright (c) Brightcove
20977 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
20978 */
20979 var ONE_SECOND_IN_TS = 90000,
20980 // 90kHz clock
20981 secondsToVideoTs,
20982 secondsToAudioTs,
20983 videoTsToSeconds,
20984 audioTsToSeconds,
20985 audioTsToVideoTs,
20986 videoTsToAudioTs,
20987 metadataTsToSeconds;
20988
20989 secondsToVideoTs = function secondsToVideoTs(seconds) {
20990 return seconds * ONE_SECOND_IN_TS;
20991 };
20992
20993 secondsToAudioTs = function secondsToAudioTs(seconds, sampleRate) {
20994 return seconds * sampleRate;
20995 };
20996
20997 videoTsToSeconds = function videoTsToSeconds(timestamp) {
20998 return timestamp / ONE_SECOND_IN_TS;
20999 };
21000
21001 audioTsToSeconds = function audioTsToSeconds(timestamp, sampleRate) {
21002 return timestamp / sampleRate;
21003 };
21004
21005 audioTsToVideoTs = function audioTsToVideoTs(timestamp, sampleRate) {
21006 return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
21007 };
21008
21009 videoTsToAudioTs = function videoTsToAudioTs(timestamp, sampleRate) {
21010 return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
21011 };
21012 /**
21013 * Adjust ID3 tag or caption timing information by the timeline pts values
21014 * (if keepOriginalTimestamps is false) and convert to seconds
21015 */
21016
21017
21018 metadataTsToSeconds = function metadataTsToSeconds(timestamp, timelineStartPts, keepOriginalTimestamps) {
21019 return videoTsToSeconds(keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts);
21020 };
21021
21022 var clock = {
21023 ONE_SECOND_IN_TS: ONE_SECOND_IN_TS,
21024 secondsToVideoTs: secondsToVideoTs,
21025 secondsToAudioTs: secondsToAudioTs,
21026 videoTsToSeconds: videoTsToSeconds,
21027 audioTsToSeconds: audioTsToSeconds,
21028 audioTsToVideoTs: audioTsToVideoTs,
21029 videoTsToAudioTs: videoTsToAudioTs,
21030 metadataTsToSeconds: metadataTsToSeconds
21031 };
21032
21033 /**
21034 * Returns a list of gops in the buffer that have a pts value of 3 seconds or more in
21035 * front of current time.
21036 *
21037 * @param {Array} buffer
21038 * The current buffer of gop information
21039 * @param {number} currentTime
21040 * The current time
21041 * @param {Double} mapping
21042 * Offset to map display time to stream presentation time
21043 * @return {Array}
21044 * List of gops considered safe to append over
21045 */
21046
21047 var gopsSafeToAlignWith = function gopsSafeToAlignWith(buffer, currentTime, mapping) {
21048 if (typeof currentTime === 'undefined' || currentTime === null || !buffer.length) {
21049 return [];
21050 } // pts value for current time + 3 seconds to give a bit more wiggle room
21051
21052
21053 var currentTimePts = Math.ceil((currentTime - mapping + 3) * clock.ONE_SECOND_IN_TS);
21054 var i;
21055
21056 for (i = 0; i < buffer.length; i++) {
21057 if (buffer[i].pts > currentTimePts) {
21058 break;
21059 }
21060 }
21061
21062 return buffer.slice(i);
21063 };
21064 /**
21065 * Appends gop information (timing and byteLength) received by the transmuxer for the
21066 * gops appended in the last call to appendBuffer
21067 *
21068 * @param {Array} buffer
21069 * The current buffer of gop information
21070 * @param {Array} gops
21071 * List of new gop information
21072 * @param {boolean} replace
21073 * If true, replace the buffer with the new gop information. If false, append the
21074 * new gop information to the buffer in the right location of time.
21075 * @return {Array}
21076 * Updated list of gop information
21077 */
21078
21079 var updateGopBuffer = function updateGopBuffer(buffer, gops, replace) {
21080 if (!gops.length) {
21081 return buffer;
21082 }
21083
21084 if (replace) {
21085 // If we are in safe append mode, then completely overwrite the gop buffer
21086 // with the most recent appeneded data. This will make sure that when appending
21087 // future segments, we only try to align with gops that are both ahead of current
21088 // time and in the last segment appended.
21089 return gops.slice();
21090 }
21091
21092 var start = gops[0].pts;
21093 var i = 0;
21094
21095 for (i; i < buffer.length; i++) {
21096 if (buffer[i].pts >= start) {
21097 break;
21098 }
21099 }
21100
21101 return buffer.slice(0, i).concat(gops);
21102 };
21103 /**
21104 * Removes gop information in buffer that overlaps with provided start and end
21105 *
21106 * @param {Array} buffer
21107 * The current buffer of gop information
21108 * @param {Double} start
21109 * position to start the remove at
21110 * @param {Double} end
21111 * position to end the remove at
21112 * @param {Double} mapping
21113 * Offset to map display time to stream presentation time
21114 */
21115
21116 var removeGopBuffer = function removeGopBuffer(buffer, start, end, mapping) {
21117 var startPts = Math.ceil((start - mapping) * clock.ONE_SECOND_IN_TS);
21118 var endPts = Math.ceil((end - mapping) * clock.ONE_SECOND_IN_TS);
21119 var updatedBuffer = buffer.slice();
21120 var i = buffer.length;
21121
21122 while (i--) {
21123 if (buffer[i].pts <= endPts) {
21124 break;
21125 }
21126 }
21127
21128 if (i === -1) {
21129 // no removal because end of remove range is before start of buffer
21130 return updatedBuffer;
21131 }
21132
21133 var j = i + 1;
21134
21135 while (j--) {
21136 if (buffer[j].pts <= startPts) {
21137 break;
21138 }
21139 } // clamp remove range start to 0 index
21140
21141
21142 j = Math.max(j, 0);
21143 updatedBuffer.splice(j, i - j + 1);
21144 return updatedBuffer;
21145 };
21146
21147 var shallowEqual = function shallowEqual(a, b) {
21148 // if both are undefined
21149 // or one or the other is undefined
21150 // they are not equal
21151 if (!a && !b || !a && b || a && !b) {
21152 return false;
21153 } // they are the same object and thus, equal
21154
21155
21156 if (a === b) {
21157 return true;
21158 } // sort keys so we can make sure they have
21159 // all the same keys later.
21160
21161
21162 var akeys = Object.keys(a).sort();
21163 var bkeys = Object.keys(b).sort(); // different number of keys, not equal
21164
21165 if (akeys.length !== bkeys.length) {
21166 return false;
21167 }
21168
21169 for (var i = 0; i < akeys.length; i++) {
21170 var key = akeys[i]; // different sorted keys, not equal
21171
21172 if (key !== bkeys[i]) {
21173 return false;
21174 } // different values, not equal
21175
21176
21177 if (a[key] !== b[key]) {
21178 return false;
21179 }
21180 }
21181
21182 return true;
21183 };
21184
  // Legacy numeric DOMException code for QuotaExceededError, used to detect a full
  // SourceBuffer on append. See:
  // https://www.w3.org/TR/WebIDL-1/#quotaexceedederror
  var QUOTA_EXCEEDED_ERR = 22;
21187
21188 /**
21189 * The segment loader has no recourse except to fetch a segment in the
21190 * current playlist and use the internal timestamps in that segment to
21191 * generate a syncPoint. This function returns a good candidate index
21192 * for that process.
21193 *
21194 * @param {Array} segments - the segments array from a playlist.
21195 * @return {number} An index of a segment from the playlist to load
21196 */
21197
21198 var getSyncSegmentCandidate = function getSyncSegmentCandidate(currentTimeline, segments, targetTime) {
21199 segments = segments || [];
21200 var timelineSegments = [];
21201 var time = 0;
21202
21203 for (var i = 0; i < segments.length; i++) {
21204 var segment = segments[i];
21205
21206 if (currentTimeline === segment.timeline) {
21207 timelineSegments.push(i);
21208 time += segment.duration;
21209
21210 if (time > targetTime) {
21211 return i;
21212 }
21213 }
21214 }
21215
21216 if (timelineSegments.length === 0) {
21217 return 0;
21218 } // default to the last timeline segment
21219
21220
21221 return timelineSegments[timelineSegments.length - 1];
21222 }; // In the event of a quota exceeded error, keep at least one second of back buffer. This
  // number was arbitrarily chosen and may be updated in the future, but seemed reasonable
  // as a start to prevent any potential issues with removing content too close to the
  // playhead.

  var MIN_BACK_BUFFER = 1; // in seconds (see the back-buffer note above)

  // Delay, in ms, between buffer checks. (The "in ms" unit belongs to this delay, not
  // to MIN_BACK_BUFFER.)
  var CHECK_BUFFER_DELAY = 500;
21230
21231 var finite = function finite(num) {
21232 return typeof num === 'number' && isFinite(num);
21233 }; // With most content hovering around 30fps, if a segment has a duration less than a half
21234 // frame at 30fps or one frame at 60fps, the bandwidth and throughput calculations will
21235 // not accurately reflect the rest of the content.
21236
21237
21238 var MIN_SEGMENT_DURATION_TO_SAVE_STATS = 1 / 60;
21239 var illegalMediaSwitch = function illegalMediaSwitch(loaderType, startingMedia, trackInfo) {
21240 // Although these checks should most likely cover non 'main' types, for now it narrows
21241 // the scope of our checks.
21242 if (loaderType !== 'main' || !startingMedia || !trackInfo) {
21243 return null;
21244 }
21245
21246 if (!trackInfo.hasAudio && !trackInfo.hasVideo) {
21247 return 'Neither audio nor video found in segment.';
21248 }
21249
21250 if (startingMedia.hasVideo && !trackInfo.hasVideo) {
21251 return 'Only audio found in segment when we expected video.' + ' We can\'t switch to audio only from a stream that had video.' + ' To get rid of this message, please add codec information to the manifest.';
21252 }
21253
21254 if (!startingMedia.hasVideo && trackInfo.hasVideo) {
21255 return 'Video found in segment when we expected only audio.' + ' We can\'t switch to a stream with video from an audio only stream.' + ' To get rid of this message, please add codec information to the manifest.';
21256 }
21257
21258 return null;
21259 };
21260 /**
21261 * Calculates a time value that is safe to remove from the back buffer without interrupting
21262 * playback.
21263 *
21264 * @param {TimeRange} seekable
21265 * The current seekable range
21266 * @param {number} currentTime
21267 * The current time of the player
21268 * @param {number} targetDuration
21269 * The target duration of the current playlist
21270 * @return {number}
21271 * Time that is safe to remove from the back buffer without interrupting playback
21272 */
21273
21274 var safeBackBufferTrimTime = function safeBackBufferTrimTime(seekable, currentTime, targetDuration) {
21275 // 30 seconds before the playhead provides a safe default for trimming.
21276 //
21277 // Choosing a reasonable default is particularly important for high bitrate content and
21278 // VOD videos/live streams with large windows, as the buffer may end up overfilled and
21279 // throw an APPEND_BUFFER_ERR.
21280 var trimTime = currentTime - Config.BACK_BUFFER_LENGTH;
21281
21282 if (seekable.length) {
21283 // Some live playlists may have a shorter window of content than the full allowed back
21284 // buffer. For these playlists, don't save content that's no longer within the window.
21285 trimTime = Math.max(trimTime, seekable.start(0));
21286 } // Don't remove within target duration of the current time to avoid the possibility of
21287 // removing the GOP currently being played, as removing it can cause playback stalls.
21288
21289
21290 var maxTrimTime = currentTime - targetDuration;
21291 return Math.min(maxTrimTime, trimTime);
21292 };
21293 var segmentInfoString = function segmentInfoString(segmentInfo) {
21294 var startOfSegment = segmentInfo.startOfSegment,
21295 duration = segmentInfo.duration,
21296 segment = segmentInfo.segment,
21297 part = segmentInfo.part,
21298 _segmentInfo$playlist = segmentInfo.playlist,
21299 seq = _segmentInfo$playlist.mediaSequence,
21300 id = _segmentInfo$playlist.id,
21301 _segmentInfo$playlist2 = _segmentInfo$playlist.segments,
21302 segments = _segmentInfo$playlist2 === void 0 ? [] : _segmentInfo$playlist2,
21303 index = segmentInfo.mediaIndex,
21304 partIndex = segmentInfo.partIndex,
21305 timeline = segmentInfo.timeline;
21306 var segmentLen = segments.length - 1;
21307 var selection = 'mediaIndex/partIndex increment';
21308
21309 if (segmentInfo.getMediaInfoForTime) {
21310 selection = "getMediaInfoForTime (" + segmentInfo.getMediaInfoForTime + ")";
21311 } else if (segmentInfo.isSyncRequest) {
21312 selection = 'getSyncSegmentCandidate (isSyncRequest)';
21313 }
21314
21315 if (segmentInfo.independent) {
21316 selection += " with independent " + segmentInfo.independent;
21317 }
21318
21319 var hasPartIndex = typeof partIndex === 'number';
21320 var name = segmentInfo.segment.uri ? 'segment' : 'pre-segment';
21321 var zeroBasedPartCount = hasPartIndex ? getKnownPartCount({
21322 preloadSegment: segment
21323 }) - 1 : 0;
21324 return name + " [" + (seq + index) + "/" + (seq + segmentLen) + "]" + (hasPartIndex ? " part [" + partIndex + "/" + zeroBasedPartCount + "]" : '') + (" segment start/end [" + segment.start + " => " + segment.end + "]") + (hasPartIndex ? " part start/end [" + part.start + " => " + part.end + "]" : '') + (" startOfSegment [" + startOfSegment + "]") + (" duration [" + duration + "]") + (" timeline [" + timeline + "]") + (" selected by [" + selection + "]") + (" playlist [" + id + "]");
21325 };
21326
21327 var timingInfoPropertyForMedia = function timingInfoPropertyForMedia(mediaType) {
21328 return mediaType + "TimingInfo";
21329 };
21330 /**
21331 * Returns the timestamp offset to use for the segment.
21332 *
21333 * @param {number} segmentTimeline
21334 * The timeline of the segment
21335 * @param {number} currentTimeline
21336 * The timeline currently being followed by the loader
21337 * @param {number} startOfSegment
21338 * The estimated segment start
21339 * @param {TimeRange[]} buffered
21340 * The loader's buffer
21341 * @param {boolean} overrideCheck
21342 * If true, no checks are made to see if the timestamp offset value should be set,
21343 * but sets it directly to a value.
21344 *
21345 * @return {number|null}
21346 * Either a number representing a new timestamp offset, or null if the segment is
21347 * part of the same timeline
21348 */
21349
21350
21351 var timestampOffsetForSegment = function timestampOffsetForSegment(_ref) {
21352 var segmentTimeline = _ref.segmentTimeline,
21353 currentTimeline = _ref.currentTimeline,
21354 startOfSegment = _ref.startOfSegment,
21355 buffered = _ref.buffered,
21356 overrideCheck = _ref.overrideCheck;
21357
21358 // Check to see if we are crossing a discontinuity to see if we need to set the
21359 // timestamp offset on the transmuxer and source buffer.
21360 //
21361 // Previously, we changed the timestampOffset if the start of this segment was less than
21362 // the currently set timestampOffset, but this isn't desirable as it can produce bad
21363 // behavior, especially around long running live streams.
21364 if (!overrideCheck && segmentTimeline === currentTimeline) {
21365 return null;
21366 } // When changing renditions, it's possible to request a segment on an older timeline. For
21367 // instance, given two renditions with the following:
21368 //
21369 // #EXTINF:10
21370 // segment1
21371 // #EXT-X-DISCONTINUITY
21372 // #EXTINF:10
21373 // segment2
21374 // #EXTINF:10
21375 // segment3
21376 //
21377 // And the current player state:
21378 //
21379 // current time: 8
21380 // buffer: 0 => 20
21381 //
21382 // The next segment on the current rendition would be segment3, filling the buffer from
21383 // 20s onwards. However, if a rendition switch happens after segment2 was requested,
21384 // then the next segment to be requested will be segment1 from the new rendition in
21385 // order to fill time 8 and onwards. Using the buffered end would result in repeated
21386 // content (since it would position segment1 of the new rendition starting at 20s). This
21387 // case can be identified when the new segment's timeline is a prior value. Instead of
21388 // using the buffered end, the startOfSegment can be used, which, hopefully, will be
21389 // more accurate to the actual start time of the segment.
21390
21391
21392 if (segmentTimeline < currentTimeline) {
21393 return startOfSegment;
21394 } // segmentInfo.startOfSegment used to be used as the timestamp offset, however, that
21395 // value uses the end of the last segment if it is available. While this value
21396 // should often be correct, it's better to rely on the buffered end, as the new
21397 // content post discontinuity should line up with the buffered end as if it were
21398 // time 0 for the new content.
21399
21400
21401 return buffered.length ? buffered.end(buffered.length - 1) : startOfSegment;
21402 };
21403 /**
21404 * Returns whether or not the loader should wait for a timeline change from the timeline
21405 * change controller before processing the segment.
21406 *
21407 * Primary timing in VHS goes by video. This is different from most media players, as
21408 * audio is more often used as the primary timing source. For the foreseeable future, VHS
21409 * will continue to use video as the primary timing source, due to the current logic and
21410 * expectations built around it.
21411
21412 * Since the timing follows video, in order to maintain sync, the video loader is
21413 * responsible for setting both audio and video source buffer timestamp offsets.
21414 *
21415 * Setting different values for audio and video source buffers could lead to
21416 * desyncing. The following examples demonstrate some of the situations where this
21417 * distinction is important. Note that all of these cases involve demuxed content. When
21418 * content is muxed, the audio and video are packaged together, therefore syncing
21419 * separate media playlists is not an issue.
21420 *
21421 * CASE 1: Audio prepares to load a new timeline before video:
21422 *
21423 * Timeline: 0 1
21424 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
21425 * Audio Loader: ^
21426 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
21427 * Video Loader ^
21428 *
21429 * In the above example, the audio loader is preparing to load the 6th segment, the first
21430 * after a discontinuity, while the video loader is still loading the 5th segment, before
21431 * the discontinuity.
21432 *
21433 * If the audio loader goes ahead and loads and appends the 6th segment before the video
21434 * loader crosses the discontinuity, then when appended, the 6th audio segment will use
21435 * the timestamp offset from timeline 0. This will likely lead to desyncing. In addition,
21436 * the audio loader must provide the audioAppendStart value to trim the content in the
21437 * transmuxer, and that value relies on the audio timestamp offset. Since the audio
21438 * timestamp offset is set by the video (main) loader, the audio loader shouldn't load the
21439 * segment until that value is provided.
21440 *
21441 * CASE 2: Video prepares to load a new timeline before audio:
21442 *
21443 * Timeline: 0 1
21444 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
21445 * Audio Loader: ^
21446 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
21447 * Video Loader ^
21448 *
21449 * In the above example, the video loader is preparing to load the 6th segment, the first
21450 * after a discontinuity, while the audio loader is still loading the 5th segment, before
21451 * the discontinuity.
21452 *
21453 * If the video loader goes ahead and loads and appends the 6th segment, then once the
21454 * segment is loaded and processed, both the video and audio timestamp offsets will be
21455 * set, since video is used as the primary timing source. This is to ensure content lines
21456 * up appropriately, as any modifications to the video timing are reflected by audio when
21457 * the video loader sets the audio and video timestamp offsets to the same value. However,
21458 * setting the timestamp offset for audio before audio has had a chance to change
21459 * timelines will likely lead to desyncing, as the audio loader will append segment 5 with
21460 * a timestamp intended to apply to segments from timeline 1 rather than timeline 0.
21461 *
21462 * CASE 3: When seeking, audio prepares to load a new timeline before video
21463 *
21464 * Timeline: 0 1
21465 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
21466 * Audio Loader: ^
21467 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
21468 * Video Loader ^
21469 *
21470 * In the above example, both audio and video loaders are loading segments from timeline
21471 * 0, but imagine that the seek originated from timeline 1.
21472 *
21473 * When seeking to a new timeline, the timestamp offset will be set based on the expected
21474 * segment start of the loaded video segment. In order to maintain sync, the audio loader
21475 * must wait for the video loader to load its segment and update both the audio and video
21476 * timestamp offsets before it may load and append its own segment. This is the case
21477 * whether the seek results in a mismatched segment request (e.g., the audio loader
21478 * chooses to load segment 3 and the video loader chooses to load segment 4) or the
21479 * loaders choose to load the same segment index from each playlist, as the segments may
21480 * not be aligned perfectly, even for matching segment indexes.
21481 *
21482 * @param {Object} timelinechangeController
21483 * @param {number} currentTimeline
21484 * The timeline currently being followed by the loader
21485 * @param {number} segmentTimeline
21486 * The timeline of the segment being loaded
21487 * @param {('main'|'audio')} loaderType
21488 * The loader type
21489 * @param {boolean} audioDisabled
21490 * Whether the audio is disabled for the loader. This should only be true when the
21491 * loader may have muxed audio in its segment, but should not append it, e.g., for
21492 * the main loader when an alternate audio playlist is active.
21493 *
21494 * @return {boolean}
21495 * Whether the loader should wait for a timeline change from the timeline change
21496 * controller before processing the segment
21497 */
21498
21499 var shouldWaitForTimelineChange = function shouldWaitForTimelineChange(_ref2) {
21500 var timelineChangeController = _ref2.timelineChangeController,
21501 currentTimeline = _ref2.currentTimeline,
21502 segmentTimeline = _ref2.segmentTimeline,
21503 loaderType = _ref2.loaderType,
21504 audioDisabled = _ref2.audioDisabled;
21505
21506 if (currentTimeline === segmentTimeline) {
21507 return false;
21508 }
21509
21510 if (loaderType === 'audio') {
21511 var lastMainTimelineChange = timelineChangeController.lastTimelineChange({
21512 type: 'main'
21513 }); // Audio loader should wait if:
21514 //
21515 // * main hasn't had a timeline change yet (thus has not loaded its first segment)
21516 // * main hasn't yet changed to the timeline audio is looking to load
21517
21518 return !lastMainTimelineChange || lastMainTimelineChange.to !== segmentTimeline;
21519 } // The main loader only needs to wait for timeline changes if there's demuxed audio.
21520 // Otherwise, there's nothing to wait for, since audio would be muxed into the main
21521 // loader's segments (or the content is audio/video only and handled by the main
21522 // loader).
21523
21524
21525 if (loaderType === 'main' && audioDisabled) {
21526 var pendingAudioTimelineChange = timelineChangeController.pendingTimelineChange({
21527 type: 'audio'
21528 }); // Main loader should wait for the audio loader if audio is not pending a timeline
21529 // change to the current timeline.
21530 //
21531 // Since the main loader is responsible for setting the timestamp offset for both
21532 // audio and video, the main loader must wait for audio to be about to change to its
21533 // timeline before setting the offset, otherwise, if audio is behind in loading,
21534 // segments from the previous timeline would be adjusted by the new timestamp offset.
21535 //
21536 // This requirement means that video will not cross a timeline until the audio is
21537 // about to cross to it, so that way audio and video will always cross the timeline
21538 // together.
21539 //
21540 // In addition to normal timeline changes, these rules also apply to the start of a
21541 // stream (going from a non-existent timeline, -1, to timeline 0). It's important
21542 // that these rules apply to the first timeline change because if they did not, it's
21543 // possible that the main loader will cross two timelines before the audio loader has
21544 // crossed one. Logic may be implemented to handle the startup as a special case, but
21545 // it's easier to simply treat all timeline changes the same.
21546
21547 if (pendingAudioTimelineChange && pendingAudioTimelineChange.to === segmentTimeline) {
21548 return false;
21549 }
21550
21551 return true;
21552 }
21553
21554 return false;
21555 };
21556 var mediaDuration = function mediaDuration(timingInfos) {
21557 var maxDuration = 0;
21558 ['video', 'audio'].forEach(function (type) {
21559 var typeTimingInfo = timingInfos[type + "TimingInfo"];
21560
21561 if (!typeTimingInfo) {
21562 return;
21563 }
21564
21565 var start = typeTimingInfo.start,
21566 end = typeTimingInfo.end;
21567 var duration;
21568
21569 if (typeof start === 'bigint' || typeof end === 'bigint') {
21570 duration = window.BigInt(end) - window.BigInt(start);
21571 } else if (typeof start === 'number' && typeof end === 'number') {
21572 duration = end - start;
21573 }
21574
21575 if (typeof duration !== 'undefined' && duration > maxDuration) {
21576 maxDuration = duration;
21577 }
21578 }); // convert back to a number if it is lower than MAX_SAFE_INTEGER
21579 // as we only need BigInt when we are above that.
21580
21581 if (typeof maxDuration === 'bigint' && maxDuration < Number.MAX_SAFE_INTEGER) {
21582 maxDuration = Number(maxDuration);
21583 }
21584
21585 return maxDuration;
21586 };
21587 var segmentTooLong = function segmentTooLong(_ref3) {
21588 var segmentDuration = _ref3.segmentDuration,
21589 maxDuration = _ref3.maxDuration;
21590
21591 // 0 duration segments are most likely due to metadata only segments or a lack of
21592 // information.
21593 if (!segmentDuration) {
21594 return false;
21595 } // For HLS:
21596 //
21597 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1
21598 // The EXTINF duration of each Media Segment in the Playlist
21599 // file, when rounded to the nearest integer, MUST be less than or equal
21600 // to the target duration; longer segments can trigger playback stalls
21601 // or other errors.
21602 //
21603 // For DASH, the mpd-parser uses the largest reported segment duration as the target
21604 // duration. Although that reported duration is occasionally approximate (i.e., not
21605 // exact), a strict check may report that a segment is too long more often in DASH.
21606
21607
21608 return Math.round(segmentDuration) > maxDuration + TIME_FUDGE_FACTOR;
21609 };
21610 var getTroublesomeSegmentDurationMessage = function getTroublesomeSegmentDurationMessage(segmentInfo, sourceType) {
21611 // Right now we aren't following DASH's timing model exactly, so only perform
21612 // this check for HLS content.
21613 if (sourceType !== 'hls') {
21614 return null;
21615 }
21616
21617 var segmentDuration = mediaDuration({
21618 audioTimingInfo: segmentInfo.audioTimingInfo,
21619 videoTimingInfo: segmentInfo.videoTimingInfo
21620 }); // Don't report if we lack information.
21621 //
21622 // If the segment has a duration of 0 it is either a lack of information or a
21623 // metadata only segment and shouldn't be reported here.
21624
21625 if (!segmentDuration) {
21626 return null;
21627 }
21628
21629 var targetDuration = segmentInfo.playlist.targetDuration;
21630 var isSegmentWayTooLong = segmentTooLong({
21631 segmentDuration: segmentDuration,
21632 maxDuration: targetDuration * 2
21633 });
21634 var isSegmentSlightlyTooLong = segmentTooLong({
21635 segmentDuration: segmentDuration,
21636 maxDuration: targetDuration
21637 });
21638 var segmentTooLongMessage = "Segment with index " + segmentInfo.mediaIndex + " " + ("from playlist " + segmentInfo.playlist.id + " ") + ("has a duration of " + segmentDuration + " ") + ("when the reported duration is " + segmentInfo.duration + " ") + ("and the target duration is " + targetDuration + ". ") + 'For HLS content, a duration in excess of the target duration may result in ' + 'playback issues. See the HLS specification section on EXT-X-TARGETDURATION for ' + 'more details: ' + 'https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1';
21639
21640 if (isSegmentWayTooLong || isSegmentSlightlyTooLong) {
21641 return {
21642 severity: isSegmentWayTooLong ? 'warn' : 'info',
21643 message: segmentTooLongMessage
21644 };
21645 }
21646
21647 return null;
21648 };
21649 /**
21650 * An object that manages segment loading and appending.
21651 *
21652 * @class SegmentLoader
21653 * @param {Object} options required and optional options
21654 * @extends videojs.EventTarget
21655 */
21656
21657 var SegmentLoader = /*#__PURE__*/function (_videojs$EventTarget) {
21658 inheritsLoose(SegmentLoader, _videojs$EventTarget);
21659
    // Constructor: validates required settings, initializes public stats and private
    // loader state, creates the transmuxer, and wires up the event listeners that
    // drive the load/append queues.
    function SegmentLoader(settings, options) {
      var _this;

      _this = _videojs$EventTarget.call(this) || this; // check pre-conditions

      if (!settings) {
        throw new TypeError('Initialization settings are required');
      }

      if (typeof settings.currentTime !== 'function') {
        throw new TypeError('No currentTime getter specified');
      }

      if (!settings.mediaSource) {
        throw new TypeError('No MediaSource specified');
      } // public properties


      _this.bandwidth = settings.bandwidth;
      _this.throughput = {
        rate: 0,
        count: 0
      };
      _this.roundTrip = NaN;

      // zero out all the media transfer/request statistics
      _this.resetStats_();

      _this.mediaIndex = null;
      _this.partIndex = null; // private settings

      _this.hasPlayed_ = settings.hasPlayed;
      _this.currentTime_ = settings.currentTime;
      _this.seekable_ = settings.seekable;
      _this.seeking_ = settings.seeking;
      _this.duration_ = settings.duration;
      _this.mediaSource_ = settings.mediaSource;
      _this.vhs_ = settings.vhs;
      _this.loaderType_ = settings.loaderType;
      _this.currentMediaInfo_ = void 0;
      _this.startingMediaInfo_ = void 0;
      _this.segmentMetadataTrack_ = settings.segmentMetadataTrack;
      _this.goalBufferLength_ = settings.goalBufferLength;
      _this.sourceType_ = settings.sourceType;
      _this.sourceUpdater_ = settings.sourceUpdater;
      _this.inbandTextTracks_ = settings.inbandTextTracks;
      _this.state_ = 'INIT';
      _this.timelineChangeController_ = settings.timelineChangeController;
      _this.shouldSaveSegmentTimingInfo_ = true;
      _this.parse708captions_ = settings.parse708captions;
      _this.useDtsForTimestampOffset_ = settings.useDtsForTimestampOffset;
      _this.captionServices_ = settings.captionServices;
      _this.experimentalExactManifestTimings = settings.experimentalExactManifestTimings; // private instance variables

      _this.checkBufferTimeout_ = null;
      _this.error_ = void 0;
      _this.currentTimeline_ = -1;
      _this.pendingSegment_ = null;
      _this.xhrOptions_ = null;
      _this.pendingSegments_ = [];
      _this.audioDisabled_ = false;
      _this.isPendingTimestampOffset_ = false; // TODO possibly move gopBuffer and timeMapping info to a separate controller

      _this.gopBuffer_ = [];
      _this.timeMapping_ = 0;
      // "safe append" mode (overwrite gop tracking on append) is enabled on IE 11+
      _this.safeAppend_ = videojs__default["default"].browser.IE_VERSION >= 11;
      _this.appendInitSegment_ = {
        audio: true,
        video: true
      };
      _this.playlistOfLastInitSegment_ = {
        audio: null,
        video: null
      };
      _this.callQueue_ = []; // If the segment loader prepares to load a segment, but does not have enough
      // information yet to start the loading process (e.g., if the audio loader wants to
      // load a segment from the next timeline but the main loader hasn't yet crossed that
      // timeline), then the load call will be added to the queue until it is ready to be
      // processed.

      _this.loadQueue_ = [];
      _this.metadataQueue_ = {
        id3: [],
        caption: []
      };
      _this.waitingOnRemove_ = false;
      _this.quotaExceededErrorRetryTimeout_ = null; // Fragmented mp4 playback

      _this.activeInitSegmentId_ = null;
      _this.initSegments_ = {}; // HLSe playback

      _this.cacheEncryptionKeys_ = settings.cacheEncryptionKeys;
      _this.keyCache_ = {};
      _this.decrypter_ = settings.decrypter; // Manages the tracking and generation of sync-points, mappings
      // between a time in the display time and a segment index within
      // a playlist

      _this.syncController_ = settings.syncController;
      _this.syncPoint_ = {
        segmentIndex: 0,
        time: 0
      };
      _this.transmuxer_ = _this.createTransmuxer_();

      // re-emit the sync controller's updates as this loader's 'syncinfoupdate' event
      _this.triggerSyncInfoUpdate_ = function () {
        return _this.trigger('syncinfoupdate');
      };

      _this.syncController_.on('syncinfoupdate', _this.triggerSyncInfoUpdate_);

      _this.mediaSource_.addEventListener('sourceopen', function () {
        if (!_this.isEndOfStream_()) {
          _this.ended_ = false;
        }
      }); // ...for determining the fetch location


      _this.fetchAtBuffer_ = false;
      _this.logger_ = logger("SegmentLoader[" + _this.loaderType_ + "]");
      // 'state' is a logged/evented wrapper around the private state_ field
      Object.defineProperty(assertThisInitialized(_this), 'state', {
        get: function get() {
          return this.state_;
        },
        set: function set(newState) {
          if (newState !== this.state_) {
            this.logger_(this.state_ + " -> " + newState);
            this.state_ = newState;
            this.trigger('statechange');
          }
        }
      });

      _this.sourceUpdater_.on('ready', function () {
        if (_this.hasEnoughInfoToAppend_()) {
          _this.processCallQueue_();
        }
      }); // Only the main loader needs to listen for pending timeline changes, as the main
      // loader should wait for audio to be ready to change its timeline so that both main
      // and audio timelines change together. For more details, see the
      // shouldWaitForTimelineChange function.


      if (_this.loaderType_ === 'main') {
        _this.timelineChangeController_.on('pendingtimelinechange', function () {
          if (_this.hasEnoughInfoToAppend_()) {
            _this.processCallQueue_();
          }
        });
      } // The main loader only listens on pending timeline changes, but the audio loader,
      // since its loads follow main, needs to listen on timeline changes. For more details,
      // see the shouldWaitForTimelineChange function.


      if (_this.loaderType_ === 'audio') {
        _this.timelineChangeController_.on('timelinechange', function () {
          if (_this.hasEnoughInfoToLoad_()) {
            _this.processLoadQueue_();
          }

          if (_this.hasEnoughInfoToAppend_()) {
            _this.processCallQueue_();
          }
        });
      }

      return _this;
    }
21826
    // Instance methods are attached to the prototype below (loose-mode class transform).
    var _proto = SegmentLoader.prototype;
21828
    // Creates this loader's transmuxer with the loader's caption/append settings.
    // remux: false keeps audio and video in separate streams;
    // keepOriginalTimestamps: true defers all timestamp adjustment to the loader.
    _proto.createTransmuxer_ = function createTransmuxer_() {
      return segmentTransmuxer.createTransmuxer({
        remux: false,
        alignGopsAtEnd: this.safeAppend_,
        keepOriginalTimestamps: true,
        parse708captions: this.parse708captions_,
        captionServices: this.captionServices_
      });
    }
    /**
     * reset all of our media stats
     *
     * @private
     */
    ;
21844
21845 _proto.resetStats_ = function resetStats_() {
21846 this.mediaBytesTransferred = 0;
21847 this.mediaRequests = 0;
21848 this.mediaRequestsAborted = 0;
21849 this.mediaRequestsTimedout = 0;
21850 this.mediaRequestsErrored = 0;
21851 this.mediaTransferDuration = 0;
21852 this.mediaSecondsLoaded = 0;
21853 this.mediaAppends = 0;
21854 }
21855 /**
21856 * dispose of the SegmentLoader and reset to the default state
21857 */
21858 ;
21859
  _proto.dispose = function dispose() {
    // announce teardown first so listeners can react before state is cleared
    this.trigger('dispose');
    this.state = 'DISPOSED';
    this.pause();
    this.abort_();

    // shut down the background transmuxer worker, if one was created
    if (this.transmuxer_) {
      this.transmuxer_.terminate();
    }

    this.resetStats_();

    // cancel any scheduled buffer-monitor tick
    if (this.checkBufferTimeout_) {
      window.clearTimeout(this.checkBufferTimeout_);
    }

    if (this.syncController_ && this.triggerSyncInfoUpdate_) {
      this.syncController_.off('syncinfoupdate', this.triggerSyncInfoUpdate_);
    }

    // remove all remaining event listeners from this loader
    this.off();
  };
21882
21883 _proto.setAudio = function setAudio(enable) {
21884 this.audioDisabled_ = !enable;
21885
21886 if (enable) {
21887 this.appendInitSegment_.audio = true;
21888 } else {
21889 // remove current track audio if it gets disabled
21890 this.sourceUpdater_.removeAudio(0, this.duration_());
21891 }
21892 }
21893 /**
21894 * abort anything that is currently doing on with the SegmentLoader
21895 * and reset to a default state
21896 */
21897 ;
21898
21899 _proto.abort = function abort() {
21900 if (this.state !== 'WAITING') {
21901 if (this.pendingSegment_) {
21902 this.pendingSegment_ = null;
21903 }
21904
21905 return;
21906 }
21907
21908 this.abort_(); // We aborted the requests we were waiting on, so reset the loader's state to READY
21909 // since we are no longer "waiting" on any requests. XHR callback is not always run
21910 // when the request is aborted. This will prevent the loader from being stuck in the
21911 // WAITING state indefinitely.
21912
21913 this.state = 'READY'; // don't wait for buffer check timeouts to begin fetching the
21914 // next segment
21915
21916 if (!this.paused()) {
21917 this.monitorBuffer_();
21918 }
21919 }
21920 /**
21921 * abort all pending xhr requests and null any pending segements
21922 *
21923 * @private
21924 */
21925 ;
21926
21927 _proto.abort_ = function abort_() {
21928 if (this.pendingSegment_ && this.pendingSegment_.abortRequests) {
21929 this.pendingSegment_.abortRequests();
21930 } // clear out the segment being processed
21931
21932
21933 this.pendingSegment_ = null;
21934 this.callQueue_ = [];
21935 this.loadQueue_ = [];
21936 this.metadataQueue_.id3 = [];
21937 this.metadataQueue_.caption = [];
21938 this.timelineChangeController_.clearPendingTimelineChange(this.loaderType_);
21939 this.waitingOnRemove_ = false;
21940 window.clearTimeout(this.quotaExceededErrorRetryTimeout_);
21941 this.quotaExceededErrorRetryTimeout_ = null;
21942 };
21943
21944 _proto.checkForAbort_ = function checkForAbort_(requestId) {
21945 // If the state is APPENDING, then aborts will not modify the state, meaning the first
21946 // callback that happens should reset the state to READY so that loading can continue.
21947 if (this.state === 'APPENDING' && !this.pendingSegment_) {
21948 this.state = 'READY';
21949 return true;
21950 }
21951
21952 if (!this.pendingSegment_ || this.pendingSegment_.requestId !== requestId) {
21953 return true;
21954 }
21955
21956 return false;
21957 }
21958 /**
21959 * set an error on the segment loader and null out any pending segements
21960 *
21961 * @param {Error} error the error to set on the SegmentLoader
21962 * @return {Error} the error that was set or that is currently set
21963 */
21964 ;
21965
21966 _proto.error = function error(_error) {
21967 if (typeof _error !== 'undefined') {
21968 this.logger_('error occurred:', _error);
21969 this.error_ = _error;
21970 }
21971
21972 this.pendingSegment_ = null;
21973 return this.error_;
21974 };
21975
  _proto.endOfStream = function endOfStream() {
    // mark the loader as having reached the end of its playlist
    this.ended_ = true;

    if (this.transmuxer_) {
      // need to clear out any cached data to prepare for the new segment
      segmentTransmuxer.reset(this.transmuxer_);
    }

    // drop any group-of-pictures info retained for GOP alignment
    this.gopBuffer_.length = 0;
    this.pause();
    this.trigger('ended');
  }
21988 /**
21989 * Indicates which time ranges are buffered
21990 *
21991 * @return {TimeRange}
21992 * TimeRange object representing the current buffered ranges
21993 */
21994 ;
21995
21996 _proto.buffered_ = function buffered_() {
21997 var trackInfo = this.getMediaInfo_();
21998
21999 if (!this.sourceUpdater_ || !trackInfo) {
22000 return videojs__default["default"].createTimeRanges();
22001 }
22002
22003 if (this.loaderType_ === 'main') {
22004 var hasAudio = trackInfo.hasAudio,
22005 hasVideo = trackInfo.hasVideo,
22006 isMuxed = trackInfo.isMuxed;
22007
22008 if (hasVideo && hasAudio && !this.audioDisabled_ && !isMuxed) {
22009 return this.sourceUpdater_.buffered();
22010 }
22011
22012 if (hasVideo) {
22013 return this.sourceUpdater_.videoBuffered();
22014 }
22015 } // One case that can be ignored for now is audio only with alt audio,
22016 // as we don't yet have proper support for that.
22017
22018
22019 return this.sourceUpdater_.audioBuffered();
22020 }
22021 /**
22022 * Gets and sets init segment for the provided map
22023 *
22024 * @param {Object} map
22025 * The map object representing the init segment to get or set
22026 * @param {boolean=} set
22027 * If true, the init segment for the provided map should be saved
22028 * @return {Object}
22029 * map object for desired init segment
22030 */
22031 ;
22032
22033 _proto.initSegmentForMap = function initSegmentForMap(map, set) {
22034 if (set === void 0) {
22035 set = false;
22036 }
22037
22038 if (!map) {
22039 return null;
22040 }
22041
22042 var id = initSegmentId(map);
22043 var storedMap = this.initSegments_[id];
22044
22045 if (set && !storedMap && map.bytes) {
22046 this.initSegments_[id] = storedMap = {
22047 resolvedUri: map.resolvedUri,
22048 byterange: map.byterange,
22049 bytes: map.bytes,
22050 tracks: map.tracks,
22051 timescales: map.timescales
22052 };
22053 }
22054
22055 return storedMap || map;
22056 }
22057 /**
22058 * Gets and sets key for the provided key
22059 *
22060 * @param {Object} key
22061 * The key object representing the key to get or set
22062 * @param {boolean=} set
22063 * If true, the key for the provided key should be saved
22064 * @return {Object}
22065 * Key object for desired key
22066 */
22067 ;
22068
22069 _proto.segmentKey = function segmentKey(key, set) {
22070 if (set === void 0) {
22071 set = false;
22072 }
22073
22074 if (!key) {
22075 return null;
22076 }
22077
22078 var id = segmentKeyId(key);
22079 var storedKey = this.keyCache_[id]; // TODO: We should use the HTTP Expires header to invalidate our cache per
22080 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-6.2.3
22081
22082 if (this.cacheEncryptionKeys_ && set && !storedKey && key.bytes) {
22083 this.keyCache_[id] = storedKey = {
22084 resolvedUri: key.resolvedUri,
22085 bytes: key.bytes
22086 };
22087 }
22088
22089 var result = {
22090 resolvedUri: (storedKey || key).resolvedUri
22091 };
22092
22093 if (storedKey) {
22094 result.bytes = storedKey.bytes;
22095 }
22096
22097 return result;
22098 }
22099 /**
22100 * Returns true if all configuration required for loading is present, otherwise false.
22101 *
22102 * @return {boolean} True if the all configuration is ready for loading
22103 * @private
22104 */
22105 ;
22106
  _proto.couldBeginLoading_ = function couldBeginLoading_() {
    // loading requires a playlist and an unpaused loader; note this returns
    // the (falsy) playlist value itself when no playlist is set, not `false`
    return this.playlist_ && !this.paused();
  }
22110 /**
22111 * load a playlist and start to fill the buffer
22112 */
22113 ;
22114
22115 _proto.load = function load() {
22116 // un-pause
22117 this.monitorBuffer_(); // if we don't have a playlist yet, keep waiting for one to be
22118 // specified
22119
22120 if (!this.playlist_) {
22121 return;
22122 } // if all the configuration is ready, initialize and begin loading
22123
22124
22125 if (this.state === 'INIT' && this.couldBeginLoading_()) {
22126 return this.init_();
22127 } // if we're in the middle of processing a segment already, don't
22128 // kick off an additional segment request
22129
22130
22131 if (!this.couldBeginLoading_() || this.state !== 'READY' && this.state !== 'INIT') {
22132 return;
22133 }
22134
22135 this.state = 'READY';
22136 }
22137 /**
22138 * Once all the starting parameters have been specified, begin
22139 * operation. This method should only be invoked from the INIT
22140 * state.
22141 *
22142 * @private
22143 */
22144 ;
22145
  _proto.init_ = function init_() {
    // transition out of INIT, clear buffered data, and start the buffer monitor
    this.state = 'READY'; // if this is the audio segment loader, and it hasn't been inited before, then any old
    // audio data from the muxed content should be removed

    this.resetEverything();
    return this.monitorBuffer_();
  }
22153 /**
22154 * set a playlist on the segment loader
22155 *
22156 * @param {PlaylistLoader} media the playlist to set on the segment loader
22157 */
22158 ;
22159
  _proto.playlist = function playlist(newPlaylist, options) {
    if (options === void 0) {
      options = {};
    }

    if (!newPlaylist) {
      return;
    }

    // snapshot the previous playlist and any in-flight segment before swapping
    var oldPlaylist = this.playlist_;
    var segmentInfo = this.pendingSegment_;
    this.playlist_ = newPlaylist;
    this.xhrOptions_ = options; // when we haven't started playing yet, the start of a live playlist
    // is always our zero-time so force a sync update each time the playlist
    // is refreshed from the server
    //
    // Use the INIT state to determine if playback has started, as the playlist sync info
    // should be fixed once requests begin (as sync points are generated based on sync
    // info), but not before then.

    if (this.state === 'INIT') {
      newPlaylist.syncInfo = {
        mediaSequence: newPlaylist.mediaSequence,
        time: 0
      }; // Setting the date time mapping means mapping the program date time (if available)
      // to time 0 on the player's timeline. The playlist's syncInfo serves a similar
      // purpose, mapping the initial mediaSequence to time zero. Since the syncInfo can
      // be updated as the playlist is refreshed before the loader starts loading, the
      // program date time mapping needs to be updated as well.
      //
      // This mapping is only done for the main loader because a program date time should
      // map equivalently between playlists.

      if (this.loaderType_ === 'main') {
        this.syncController_.setDateTimeMappingForStart(newPlaylist);
      }
    }

    var oldId = null;

    if (oldPlaylist) {
      if (oldPlaylist.id) {
        oldId = oldPlaylist.id;
      } else if (oldPlaylist.uri) {
        oldId = oldPlaylist.uri;
      }
    }

    this.logger_("playlist update [" + oldId + " => " + (newPlaylist.id || newPlaylist.uri) + "]"); // in VOD, this is always a rendition switch (or we updated our syncInfo above)
    // in LIVE, we always want to update with new playlists (including refreshes)

    this.trigger('syncinfoupdate'); // if we were unpaused but waiting for a playlist, start
    // buffering now

    if (this.state === 'INIT' && this.couldBeginLoading_()) {
      return this.init_();
    }

    // a changed URI means a rendition switch rather than a live refresh
    if (!oldPlaylist || oldPlaylist.uri !== newPlaylist.uri) {
      if (this.mediaIndex !== null) {
        // we must reset/resync the segment loader when we switch renditions and
        // the segment loader is already synced to the previous rendition
        // on playlist changes we want it to be possible to fetch
        // at the buffer for vod but not for live. So we use resetLoader
        // for live and resyncLoader for vod. We want this because
        // if a playlist uses independent and non-independent segments/parts the
        // buffer may not accurately reflect the next segment that we should try
        // downloading.
        if (!newPlaylist.endList) {
          this.resetLoader();
        } else {
          this.resyncLoader();
        }
      }

      this.currentMediaInfo_ = void 0;
      this.trigger('playlistupdate'); // the rest of this function depends on `oldPlaylist` being defined

      return;
    } // we reloaded the same playlist so we are in a live scenario
    // and we will likely need to adjust the mediaIndex


    var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;
    this.logger_("live window shift [" + mediaSequenceDiff + "]"); // update the mediaIndex on the SegmentLoader
    // this is important because we can abort a request and this value must be
    // equal to the last appended mediaIndex

    if (this.mediaIndex !== null) {
      this.mediaIndex -= mediaSequenceDiff; // this can happen if we are going to load the first segment, but get a playlist
      // update during that. mediaIndex would go from 0 to -1 if mediaSequence in the
      // new playlist was incremented by 1.

      if (this.mediaIndex < 0) {
        this.mediaIndex = null;
        this.partIndex = null;
      } else {
        var segment = this.playlist_.segments[this.mediaIndex]; // partIndex should remain the same for the same segment
        // unless parts fell off of the playlist for this segment.
        // In that case we need to reset partIndex and resync

        if (this.partIndex && (!segment.parts || !segment.parts.length || !segment.parts[this.partIndex])) {
          var mediaIndex = this.mediaIndex;
          this.logger_("currently processing part (index " + this.partIndex + ") no longer exists.");
          this.resetLoader(); // We want to throw away the partIndex and the data associated with it,
          // as the part was dropped from our current playlists segment.
          // The mediaIndex will still be valid so keep that around.

          this.mediaIndex = mediaIndex;
        }
      }
    } // update the mediaIndex on the SegmentInfo object
    // this is important because we will update this.mediaIndex with this value
    // in `handleAppendsDone_` after the segment has been successfully appended


    if (segmentInfo) {
      segmentInfo.mediaIndex -= mediaSequenceDiff;

      if (segmentInfo.mediaIndex < 0) {
        segmentInfo.mediaIndex = null;
        segmentInfo.partIndex = null;
      } else {
        // we need to update the referenced segment so that timing information is
        // saved for the new playlist's segment, however, if the segment fell off the
        // playlist, we can leave the old reference and just lose the timing info
        if (segmentInfo.mediaIndex >= 0) {
          segmentInfo.segment = newPlaylist.segments[segmentInfo.mediaIndex];
        }

        if (segmentInfo.partIndex >= 0 && segmentInfo.segment.parts) {
          segmentInfo.part = segmentInfo.segment.parts[segmentInfo.partIndex];
        }
      }
    }

    this.syncController_.saveExpiredSegmentInfo(oldPlaylist, newPlaylist);
  }
22298 /**
22299 * Prevent the loader from fetching additional segments. If there
22300 * is a segment request outstanding, it will finish processing
22301 * before the loader halts. A segment loader can be unpaused by
22302 * calling load().
22303 */
22304 ;
22305
22306 _proto.pause = function pause() {
22307 if (this.checkBufferTimeout_) {
22308 window.clearTimeout(this.checkBufferTimeout_);
22309 this.checkBufferTimeout_ = null;
22310 }
22311 }
22312 /**
22313 * Returns whether the segment loader is fetching additional
22314 * segments when given the opportunity. This property can be
22315 * modified through calls to pause() and load().
22316 */
22317 ;
22318
  _proto.paused = function paused() {
    // pause() nulls the timeout id, so a null id means the loader is paused
    return this.checkBufferTimeout_ === null;
  }
22322 /**
22323 * Delete all the buffered data and reset the SegmentLoader
22324 *
22325 * @param {Function} [done] an optional callback to be executed when the remove
22326 * operation is complete
22327 */
22328 ;
22329
22330 _proto.resetEverything = function resetEverything(done) {
22331 this.ended_ = false;
22332 this.appendInitSegment_ = {
22333 audio: true,
22334 video: true
22335 };
22336 this.resetLoader(); // remove from 0, the earliest point, to Infinity, to signify removal of everything.
22337 // VTT Segment Loader doesn't need to do anything but in the regular SegmentLoader,
22338 // we then clamp the value to duration if necessary.
22339
22340 this.remove(0, Infinity, done); // clears fmp4 captions
22341
22342 if (this.transmuxer_) {
22343 this.transmuxer_.postMessage({
22344 action: 'clearAllMp4Captions'
22345 }); // reset the cache in the transmuxer
22346
22347 this.transmuxer_.postMessage({
22348 action: 'reset'
22349 });
22350 }
22351 }
22352 /**
22353 * Force the SegmentLoader to resync and start loading around the currentTime instead
22354 * of starting at the end of the buffer
22355 *
22356 * Useful for fast quality changes
22357 */
22358 ;
22359
  _proto.resetLoader = function resetLoader() {
    // disable fetching at the buffered end so the next request is chosen
    // around currentTime, then resync
    this.fetchAtBuffer_ = false;
    this.resyncLoader();
  }
22364 /**
22365 * Force the SegmentLoader to restart synchronization and make a conservative guess
22366 * before returning to the simple walk-forward method
22367 */
22368 ;
22369
  _proto.resyncLoader = function resyncLoader() {
    if (this.transmuxer_) {
      // need to clear out any cached data to prepare for the new segment
      segmentTransmuxer.reset(this.transmuxer_);
    }

    // forget all walk-forward state; the next request will need a new sync point
    this.mediaIndex = null;
    this.partIndex = null;
    this.syncPoint_ = null;
    this.isPendingTimestampOffset_ = false;
    this.callQueue_ = [];
    this.loadQueue_ = [];
    this.metadataQueue_.id3 = [];
    this.metadataQueue_.caption = [];
    this.abort();

    if (this.transmuxer_) {
      this.transmuxer_.postMessage({
        action: 'clearParsedMp4Captions'
      });
    }
  }
22392 /**
22393 * Remove any data in the source buffer between start and end times
22394 *
22395 * @param {number} start - the start time of the region to remove from the buffer
22396 * @param {number} end - the end time of the region to remove from the buffer
22397 * @param {Function} [done] - an optional callback to be executed when the remove
22398 * @param {boolean} force - force all remove operations to happen
22399 * operation is complete
22400 */
22401 ;
22402
22403 _proto.remove = function remove(start, end, done, force) {
22404 if (done === void 0) {
22405 done = function done() {};
22406 }
22407
22408 if (force === void 0) {
22409 force = false;
22410 }
22411
22412 // clamp end to duration if we need to remove everything.
22413 // This is due to a browser bug that causes issues if we remove to Infinity.
22414 // videojs/videojs-contrib-hls#1225
22415 if (end === Infinity) {
22416 end = this.duration_();
22417 } // skip removes that would throw an error
22418 // commonly happens during a rendition switch at the start of a video
22419 // from start 0 to end 0
22420
22421
22422 if (end <= start) {
22423 this.logger_('skipping remove because end ${end} is <= start ${start}');
22424 return;
22425 }
22426
22427 if (!this.sourceUpdater_ || !this.getMediaInfo_()) {
22428 this.logger_('skipping remove because no source updater or starting media info'); // nothing to remove if we haven't processed any media
22429
22430 return;
22431 } // set it to one to complete this function's removes
22432
22433
22434 var removesRemaining = 1;
22435
22436 var removeFinished = function removeFinished() {
22437 removesRemaining--;
22438
22439 if (removesRemaining === 0) {
22440 done();
22441 }
22442 };
22443
22444 if (force || !this.audioDisabled_) {
22445 removesRemaining++;
22446 this.sourceUpdater_.removeAudio(start, end, removeFinished);
22447 } // While it would be better to only remove video if the main loader has video, this
22448 // should be safe with audio only as removeVideo will call back even if there's no
22449 // video buffer.
22450 //
22451 // In theory we can check to see if there's video before calling the remove, but in
22452 // the event that we're switching between renditions and from video to audio only
22453 // (when we add support for that), we may need to clear the video contents despite
22454 // what the new media will contain.
22455
22456
22457 if (force || this.loaderType_ === 'main') {
22458 this.gopBuffer_ = removeGopBuffer(this.gopBuffer_, start, end, this.timeMapping_);
22459 removesRemaining++;
22460 this.sourceUpdater_.removeVideo(start, end, removeFinished);
22461 } // remove any captions and ID3 tags
22462
22463
22464 for (var track in this.inbandTextTracks_) {
22465 removeCuesFromTrack(start, end, this.inbandTextTracks_[track]);
22466 }
22467
22468 removeCuesFromTrack(start, end, this.segmentMetadataTrack_); // finished this function's removes
22469
22470 removeFinished();
22471 }
22472 /**
22473 * (re-)schedule monitorBufferTick_ to run as soon as possible
22474 *
22475 * @private
22476 */
22477 ;
22478
  _proto.monitorBuffer_ = function monitorBuffer_() {
    // only one pending tick at a time; cancel any previously scheduled one
    if (this.checkBufferTimeout_) {
      window.clearTimeout(this.checkBufferTimeout_);
    }

    // schedule the next tick to run almost immediately (1ms)
    this.checkBufferTimeout_ = window.setTimeout(this.monitorBufferTick_.bind(this), 1);
  }
22486 /**
22487 * As long as the SegmentLoader is in the READY state, periodically
22488 * invoke fillBuffer_().
22489 *
22490 * @private
22491 */
22492 ;
22493
  _proto.monitorBufferTick_ = function monitorBufferTick_() {
    // only fill the buffer while READY; any other state means a request or
    // append is already in flight
    if (this.state === 'READY') {
      this.fillBuffer_();
    }

    if (this.checkBufferTimeout_) {
      window.clearTimeout(this.checkBufferTimeout_);
    }

    // reschedule the periodic check (CHECK_BUFFER_DELAY is a file-level constant)
    this.checkBufferTimeout_ = window.setTimeout(this.monitorBufferTick_.bind(this), CHECK_BUFFER_DELAY);
  }
22505 /**
22506 * fill the buffer with segements unless the sourceBuffers are
22507 * currently updating
22508 *
22509 * Note: this function should only ever be called by monitorBuffer_
22510 * and never directly
22511 *
22512 * @private
22513 */
22514 ;
22515
22516 _proto.fillBuffer_ = function fillBuffer_() {
22517 // TODO since the source buffer maintains a queue, and we shouldn't call this function
22518 // except when we're ready for the next segment, this check can most likely be removed
22519 if (this.sourceUpdater_.updating()) {
22520 return;
22521 } // see if we need to begin loading immediately
22522
22523
22524 var segmentInfo = this.chooseNextRequest_();
22525
22526 if (!segmentInfo) {
22527 return;
22528 }
22529
22530 if (typeof segmentInfo.timestampOffset === 'number') {
22531 this.isPendingTimestampOffset_ = false;
22532 this.timelineChangeController_.pendingTimelineChange({
22533 type: this.loaderType_,
22534 from: this.currentTimeline_,
22535 to: segmentInfo.timeline
22536 });
22537 }
22538
22539 this.loadSegment_(segmentInfo);
22540 }
22541 /**
22542 * Determines if we should call endOfStream on the media source based
22543 * on the state of the buffer or if appened segment was the final
22544 * segment in the playlist.
22545 *
22546 * @param {number} [mediaIndex] the media index of segment we last appended
22547 * @param {Object} [playlist] a media playlist object
22548 * @return {boolean} do we need to call endOfStream on the MediaSource
22549 */
22550 ;
22551
  _proto.isEndOfStream_ = function isEndOfStream_(mediaIndex, playlist, partIndex) {
    // defaults mirror the loader's current position
    if (mediaIndex === void 0) {
      mediaIndex = this.mediaIndex;
    }

    if (playlist === void 0) {
      playlist = this.playlist_;
    }

    if (partIndex === void 0) {
      partIndex = this.partIndex;
    }

    if (!playlist || !this.mediaSource_) {
      return false;
    }

    var segment = typeof mediaIndex === 'number' && playlist.segments[mediaIndex]; // mediaIndex is zero based but length is 1 based

    var appendedLastSegment = mediaIndex + 1 === playlist.segments.length; // true if there are no parts, or this is the last part.

    var appendedLastPart = !segment || !segment.parts || partIndex + 1 === segment.parts.length; // if we've buffered to the end of the video, we need to call endOfStream
    // so that MediaSources can trigger the `ended` event when it runs out of
    // buffered data instead of waiting for me

    // note: only a VOD playlist (endList) with an open MediaSource can end
    return playlist.endList && this.mediaSource_.readyState === 'open' && appendedLastSegment && appendedLastPart;
  }
22579 /**
22580 * Determines what request should be made given current segment loader state.
22581 *
22582 * @return {Object} a request object that describes the segment/part to load
22583 */
22584 ;
22585
  _proto.chooseNextRequest_ = function chooseNextRequest_() {
    var buffered = this.buffered_();
    var bufferedEnd = lastBufferedEnd(buffered) || 0;
    var bufferedTime = timeAheadOf(buffered, this.currentTime_());
    var preloaded = !this.hasPlayed_() && bufferedTime >= 1;
    var haveEnoughBuffer = bufferedTime >= this.goalBufferLength_();
    var segments = this.playlist_.segments; // return no segment if:
    // 1. we don't have segments
    // 2. The video has not yet played and we already downloaded a segment
    // 3. we already have enough buffered time

    if (!segments.length || preloaded || haveEnoughBuffer) {
      return null;
    }

    // reuse the existing sync point if there is one; otherwise ask the sync
    // controller for a fresh one (may still be null, forcing a sync request)
    this.syncPoint_ = this.syncPoint_ || this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_());
    var next = {
      partIndex: null,
      mediaIndex: null,
      startOfSegment: null,
      playlist: this.playlist_,
      isSyncRequest: Boolean(!this.syncPoint_)
    };

    if (next.isSyncRequest) {
      next.mediaIndex = getSyncSegmentCandidate(this.currentTimeline_, segments, bufferedEnd);
    } else if (this.mediaIndex !== null) {
      // walk-forward: continue from the last appended segment/part
      var segment = segments[this.mediaIndex];
      var partIndex = typeof this.partIndex === 'number' ? this.partIndex : -1;
      next.startOfSegment = segment.end ? segment.end : bufferedEnd;

      if (segment.parts && segment.parts[partIndex + 1]) {
        next.mediaIndex = this.mediaIndex;
        next.partIndex = partIndex + 1;
      } else {
        next.mediaIndex = this.mediaIndex + 1;
      }
    } else {
      // Find the segment containing the end of the buffer or current time.
      var _Playlist$getMediaInf = Playlist.getMediaInfoForTime({
        experimentalExactManifestTimings: this.experimentalExactManifestTimings,
        playlist: this.playlist_,
        currentTime: this.fetchAtBuffer_ ? bufferedEnd : this.currentTime_(),
        startingPartIndex: this.syncPoint_.partIndex,
        startingSegmentIndex: this.syncPoint_.segmentIndex,
        startTime: this.syncPoint_.time
      }),
          segmentIndex = _Playlist$getMediaInf.segmentIndex,
          startTime = _Playlist$getMediaInf.startTime,
          _partIndex = _Playlist$getMediaInf.partIndex;

      next.getMediaInfoForTime = this.fetchAtBuffer_ ? "bufferedEnd " + bufferedEnd : "currentTime " + this.currentTime_();
      next.mediaIndex = segmentIndex;
      next.startOfSegment = startTime;
      next.partIndex = _partIndex;
    }

    var nextSegment = segments[next.mediaIndex];
    var nextPart = nextSegment && typeof next.partIndex === 'number' && nextSegment.parts && nextSegment.parts[next.partIndex]; // if the next segment index is invalid or
    // the next partIndex is invalid do not choose a next segment.

    if (!nextSegment || typeof next.partIndex === 'number' && !nextPart) {
      return null;
    } // if the next segment has parts, and we don't have a partIndex.
    // Set partIndex to 0


    if (typeof next.partIndex !== 'number' && nextSegment.parts) {
      next.partIndex = 0;
      nextPart = nextSegment.parts[0];
    } // if we have no buffered data then we need to make sure
    // that the next part we append is "independent" if possible.
    // So we check if the previous part is independent, and request
    // it if it is.


    if (!bufferedTime && nextPart && !nextPart.independent) {
      if (next.partIndex === 0) {
        // first part of a segment: try the last part of the previous segment
        var lastSegment = segments[next.mediaIndex - 1];
        var lastSegmentLastPart = lastSegment.parts && lastSegment.parts.length && lastSegment.parts[lastSegment.parts.length - 1];

        if (lastSegmentLastPart && lastSegmentLastPart.independent) {
          next.mediaIndex -= 1;
          next.partIndex = lastSegment.parts.length - 1;
          next.independent = 'previous segment';
        }
      } else if (nextSegment.parts[next.partIndex - 1].independent) {
        next.partIndex -= 1;
        next.independent = 'previous part';
      }
    }

    var ended = this.mediaSource_ && this.mediaSource_.readyState === 'ended'; // do not choose a next segment if all of the following:
    // 1. this is the last segment in the playlist
    // 2. end of stream has been called on the media source already
    // 3. the player is not seeking

    if (next.mediaIndex >= segments.length - 1 && ended && !this.seeking_()) {
      return null;
    }

    return this.generateSegmentInfo_(next);
  };
22689
  _proto.generateSegmentInfo_ = function generateSegmentInfo_(options) {
    var independent = options.independent,
        playlist = options.playlist,
        mediaIndex = options.mediaIndex,
        startOfSegment = options.startOfSegment,
        isSyncRequest = options.isSyncRequest,
        partIndex = options.partIndex,
        forceTimestampOffset = options.forceTimestampOffset,
        getMediaInfoForTime = options.getMediaInfoForTime;
    var segment = playlist.segments[mediaIndex];
    var part = typeof partIndex === 'number' && segment.parts[partIndex];
    var segmentInfo = {
      requestId: 'segment-loader-' + Math.random(),
      // resolve the segment URL relative to the playlist
      uri: part && part.resolvedUri || segment.resolvedUri,
      // the segment's mediaIndex at the time it was requested
      mediaIndex: mediaIndex,
      partIndex: part ? partIndex : null,
      // whether or not to update the SegmentLoader's state with this
      // segment's mediaIndex
      isSyncRequest: isSyncRequest,
      startOfSegment: startOfSegment,
      // the segment's playlist
      playlist: playlist,
      // unencrypted bytes of the segment
      bytes: null,
      // when a key is defined for this segment, the encrypted bytes
      encryptedBytes: null,
      // The target timestampOffset for this segment when we append it
      // to the source buffer
      timestampOffset: null,
      // The timeline that the segment is in
      timeline: segment.timeline,
      // The expected duration of the segment in seconds
      duration: part && part.duration || segment.duration,
      // retain the segment in case the playlist updates while doing an async process
      segment: segment,
      part: part,
      byteLength: 0,
      transmuxer: this.transmuxer_,
      // type of getMediaInfoForTime that was used to get this segment
      getMediaInfoForTime: getMediaInfoForTime,
      independent: independent
    };
    // an explicit forceTimestampOffset wins over the loader's pending flag
    var overrideCheck = typeof forceTimestampOffset !== 'undefined' ? forceTimestampOffset : this.isPendingTimestampOffset_;
    segmentInfo.timestampOffset = this.timestampOffsetForSegment_({
      segmentTimeline: segment.timeline,
      currentTimeline: this.currentTimeline_,
      startOfSegment: startOfSegment,
      buffered: this.buffered_(),
      overrideCheck: overrideCheck
    });
    var audioBufferedEnd = lastBufferedEnd(this.sourceUpdater_.audioBuffered());

    if (typeof audioBufferedEnd === 'number') {
      // since the transmuxer is using the actual timing values, but the buffer is
      // adjusted by the timestamp offset, we must adjust the value here
      segmentInfo.audioAppendStart = audioBufferedEnd - this.sourceUpdater_.audioTimestampOffset();
    }

    if (this.sourceUpdater_.videoBuffered().length) {
      segmentInfo.gopsToAlignWith = gopsSafeToAlignWith(this.gopBuffer_, // since the transmuxer is using the actual timing values, but the time is
      // adjusted by the timestmap offset, we must adjust the value here
      this.currentTime_() - this.sourceUpdater_.videoTimestampOffset(), this.timeMapping_);
    }

    return segmentInfo;
  } // get the timestampoffset for a segment,
22758 // added so that vtt segment loader can override and prevent
22759 // adding timestamp offsets.
22760 ;
22761
22762 _proto.timestampOffsetForSegment_ = function timestampOffsetForSegment_(options) {
22763 return timestampOffsetForSegment(options);
22764 }
22765 /**
22766 * Determines if the network has enough bandwidth to complete the current segment
22767 * request in a timely manner. If not, the request will be aborted early and bandwidth
22768 * updated to trigger a playlist switch.
22769 *
22770 * @param {Object} stats
22771 * Object containing stats about the request timing and size
22772 * @private
22773 */
22774 ;
22775
  _proto.earlyAbortWhenNeeded_ = function earlyAbortWhenNeeded_(stats) {
    // Bail out when an early abort could never help:
    // - playback is paused, so rebuffering is not imminent;
    // - no xhr timeout is configured (used here as a proxy for "this playlist
    //   is the lowest enabled rendition" — see TODO below); or
    // - the playlist advertises no BANDWIDTH, so segment sizes can't be estimated.
    if (this.vhs_.tech_.paused() || // Don't abort if the current playlist is on the lowestEnabledRendition
    // TODO: Replace using timeout with a boolean indicating whether this playlist is
    // the lowestEnabledRendition.
    !this.xhrOptions_.timeout || // Don't abort if we have no bandwidth information to estimate segment sizes
    !this.playlist_.attributes.BANDWIDTH) {
      return;
    } // Wait at least 1 second since the first byte of data has been received before
    // using the calculated bandwidth from the progress event, to allow the bitrate
    // estimate to stabilize.


    if (Date.now() - (stats.firstBytesReceivedAt || Date.now()) < 1000) {
      return;
    }

    var currentTime = this.currentTime_();
    var measuredBandwidth = stats.bandwidth;
    var segmentDuration = this.pendingSegment_.duration;
    var requestTimeRemaining = Playlist.estimateSegmentRequestTime(segmentDuration, measuredBandwidth, this.playlist_, stats.bytesReceived); // Subtract 1 from the timeUntilRebuffer so we still consider an early abort
    // if we are only left with less than 1 second when the request completes.
    // A negative timeUntilRebuffer indicates we are already rebuffering.

    var timeUntilRebuffer$1 = timeUntilRebuffer(this.buffered_(), currentTime, this.vhs_.tech_.playbackRate()) - 1; // Only consider aborting early if the estimated time to finish the download
    // is larger than the estimated time until the player runs out of forward buffer.

    if (requestTimeRemaining <= timeUntilRebuffer$1) {
      return;
    }

    var switchCandidate = minRebufferMaxBandwidthSelector({
      master: this.vhs_.playlists.master,
      currentTime: currentTime,
      bandwidth: measuredBandwidth,
      duration: this.duration_(),
      segmentDuration: segmentDuration,
      timeUntilRebuffer: timeUntilRebuffer$1,
      currentTimeline: this.currentTimeline_,
      syncController: this.syncController_
    });

    if (!switchCandidate) {
      return;
    }

    var rebufferingImpact = requestTimeRemaining - timeUntilRebuffer$1;
    var timeSavedBySwitching = rebufferingImpact - switchCandidate.rebufferingImpact;
    var minimumTimeSaving = 0.5; // If we are already rebuffering, increase the amount of variance we add to the
    // potential round trip time of the new request so that we are not too aggressive
    // with switching to a playlist that might save us a fraction of a second.

    if (timeUntilRebuffer$1 <= TIME_FUDGE_FACTOR) {
      minimumTimeSaving = 1;
    }

    // Don't switch to the same playlist, a missing playlist, or one whose
    // predicted saving is below the threshold.
    if (!switchCandidate.playlist || switchCandidate.playlist.uri === this.playlist_.uri || timeSavedBySwitching < minimumTimeSaving) {
      return;
    } // Set the bandwidth to that of the desired playlist, being sure to scale by
    // BANDWIDTH_VARIANCE and add one so the playlist selector does not exclude it.
    // Don't trigger a bandwidthupdate, as the bandwidth is artificial.


    this.bandwidth = switchCandidate.playlist.attributes.BANDWIDTH * Config.BANDWIDTH_VARIANCE + 1;
    this.trigger('earlyabort');
  };
22841
22842 _proto.handleAbort_ = function handleAbort_(segmentInfo) {
22843 this.logger_("Aborting " + segmentInfoString(segmentInfo));
22844 this.mediaRequestsAborted += 1;
22845 }
22846 /**
22847 * XHR `progress` event handler
22848 *
22849 * @param {Event}
22850 * The XHR `progress` event
22851 * @param {Object} simpleSegment
22852 * A simplified segment object copy
22853 * @private
22854 */
22855 ;
22856
22857 _proto.handleProgress_ = function handleProgress_(event, simpleSegment) {
22858 this.earlyAbortWhenNeeded_(simpleSegment.stats);
22859
22860 if (this.checkForAbort_(simpleSegment.requestId)) {
22861 return;
22862 }
22863
22864 this.trigger('progress');
22865 };
22866
22867 _proto.handleTrackInfo_ = function handleTrackInfo_(simpleSegment, trackInfo) {
22868 this.earlyAbortWhenNeeded_(simpleSegment.stats);
22869
22870 if (this.checkForAbort_(simpleSegment.requestId)) {
22871 return;
22872 }
22873
22874 if (this.checkForIllegalMediaSwitch(trackInfo)) {
22875 return;
22876 }
22877
22878 trackInfo = trackInfo || {}; // When we have track info, determine what media types this loader is dealing with.
22879 // Guard against cases where we're not getting track info at all until we are
22880 // certain that all streams will provide it.
22881
22882 if (!shallowEqual(this.currentMediaInfo_, trackInfo)) {
22883 this.appendInitSegment_ = {
22884 audio: true,
22885 video: true
22886 };
22887 this.startingMediaInfo_ = trackInfo;
22888 this.currentMediaInfo_ = trackInfo;
22889 this.logger_('trackinfo update', trackInfo);
22890 this.trigger('trackinfo');
22891 } // trackinfo may cause an abort if the trackinfo
22892 // causes a codec change to an unsupported codec.
22893
22894
22895 if (this.checkForAbort_(simpleSegment.requestId)) {
22896 return;
22897 } // set trackinfo on the pending segment so that
22898 // it can append.
22899
22900
22901 this.pendingSegment_.trackInfo = trackInfo; // check if any calls were waiting on the track info
22902
22903 if (this.hasEnoughInfoToAppend_()) {
22904 this.processCallQueue_();
22905 }
22906 };
22907
22908 _proto.handleTimingInfo_ = function handleTimingInfo_(simpleSegment, mediaType, timeType, time) {
22909 this.earlyAbortWhenNeeded_(simpleSegment.stats);
22910
22911 if (this.checkForAbort_(simpleSegment.requestId)) {
22912 return;
22913 }
22914
22915 var segmentInfo = this.pendingSegment_;
22916 var timingInfoProperty = timingInfoPropertyForMedia(mediaType);
22917 segmentInfo[timingInfoProperty] = segmentInfo[timingInfoProperty] || {};
22918 segmentInfo[timingInfoProperty][timeType] = time;
22919 this.logger_("timinginfo: " + mediaType + " - " + timeType + " - " + time); // check if any calls were waiting on the timing info
22920
22921 if (this.hasEnoughInfoToAppend_()) {
22922 this.processCallQueue_();
22923 }
22924 };
22925
  // Handle 608/708 caption data parsed from a segment: group captions per
  // track, clear overlapping cues, and add the new cues with the current
  // timestamp offset applied.
  _proto.handleCaptions_ = function handleCaptions_(simpleSegment, captionData) {
    var _this2 = this;

    this.earlyAbortWhenNeeded_(simpleSegment.stats);

    if (this.checkForAbort_(simpleSegment.requestId)) {
      return;
    } // An empty caption event could only happen with fmp4 segments, but
    // should still not happen in general.


    if (captionData.length === 0) {
      this.logger_('SegmentLoader received no captions from a caption event');
      return;
    }

    var segmentInfo = this.pendingSegment_; // Wait until we have some video data appended so that caption timing
    // can be adjusted by the timestamp offset; queue this call for replay
    // via processMetadataQueue_ after the first append.

    if (!segmentInfo.hasAppendedData_) {
      this.metadataQueue_.caption.push(this.handleCaptions_.bind(this, simpleSegment, captionData));
      return;
    }

    // Prefer the video timestamp offset; fall back to audio when no video
    // offset has been set.
    var timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset();
    var captionTracks = {}; // Accumulate overall start/end and the caption list for each track/stream.

    captionData.forEach(function (caption) {
      // caption.stream is actually a track name...
      // set to the existing values in tracks or default values
      captionTracks[caption.stream] = captionTracks[caption.stream] || {
        // Infinity, as any other value will be less than this
        startTime: Infinity,
        captions: [],
        // 0, as any other value will be more than this
        endTime: 0
      };
      var captionTrack = captionTracks[caption.stream];
      captionTrack.startTime = Math.min(captionTrack.startTime, caption.startTime + timestampOffset);
      captionTrack.endTime = Math.max(captionTrack.endTime, caption.endTime + timestampOffset);
      captionTrack.captions.push(caption);
    });
    Object.keys(captionTracks).forEach(function (trackName) {
      var _captionTracks$trackN = captionTracks[trackName],
          startTime = _captionTracks$trackN.startTime,
          endTime = _captionTracks$trackN.endTime,
          captions = _captionTracks$trackN.captions;
      var inbandTextTracks = _this2.inbandTextTracks_;

      _this2.logger_("adding cues from " + startTime + " -> " + endTime + " for " + trackName);

      createCaptionsTrackIfNotExists(inbandTextTracks, _this2.vhs_.tech_, trackName); // Clear out any cues that start and end at the same time period for the same track.
      // We do this because a rendition change that also changes the timescale for captions
      // will result in captions being re-parsed for certain segments. If we add them again
      // without clearing, we will have two of the same captions visible.

      removeCuesFromTrack(startTime, endTime, inbandTextTracks[trackName]);
      addCaptionData({
        captionArray: captions,
        inbandTextTracks: inbandTextTracks,
        timestampOffset: timestampOffset
      });
    }); // Reset captions stored in the transmuxer, since the parsed captions
    // have been added to a text track at this point.

    if (this.transmuxer_) {
      this.transmuxer_.postMessage({
        action: 'clearParsedMp4Captions'
      });
    }
  };
22997
22998 _proto.handleId3_ = function handleId3_(simpleSegment, id3Frames, dispatchType) {
22999 this.earlyAbortWhenNeeded_(simpleSegment.stats);
23000
23001 if (this.checkForAbort_(simpleSegment.requestId)) {
23002 return;
23003 }
23004
23005 var segmentInfo = this.pendingSegment_; // we need to have appended data in order for the timestamp offset to be set
23006
23007 if (!segmentInfo.hasAppendedData_) {
23008 this.metadataQueue_.id3.push(this.handleId3_.bind(this, simpleSegment, id3Frames, dispatchType));
23009 return;
23010 }
23011
23012 var timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset(); // There's potentially an issue where we could double add metadata if there's a muxed
23013 // audio/video source with a metadata track, and an alt audio with a metadata track.
23014 // However, this probably won't happen, and if it does it can be handled then.
23015
23016 createMetadataTrackIfNotExists(this.inbandTextTracks_, dispatchType, this.vhs_.tech_);
23017 addMetadata({
23018 inbandTextTracks: this.inbandTextTracks_,
23019 metadataArray: id3Frames,
23020 timestampOffset: timestampOffset,
23021 videoDuration: this.duration_()
23022 });
23023 };
23024
23025 _proto.processMetadataQueue_ = function processMetadataQueue_() {
23026 this.metadataQueue_.id3.forEach(function (fn) {
23027 return fn();
23028 });
23029 this.metadataQueue_.caption.forEach(function (fn) {
23030 return fn();
23031 });
23032 this.metadataQueue_.id3 = [];
23033 this.metadataQueue_.caption = [];
23034 };
23035
23036 _proto.processCallQueue_ = function processCallQueue_() {
23037 var callQueue = this.callQueue_; // Clear out the queue before the queued functions are run, since some of the
23038 // functions may check the length of the load queue and default to pushing themselves
23039 // back onto the queue.
23040
23041 this.callQueue_ = [];
23042 callQueue.forEach(function (fun) {
23043 return fun();
23044 });
23045 };
23046
23047 _proto.processLoadQueue_ = function processLoadQueue_() {
23048 var loadQueue = this.loadQueue_; // Clear out the queue before the queued functions are run, since some of the
23049 // functions may check the length of the load queue and default to pushing themselves
23050 // back onto the queue.
23051
23052 this.loadQueue_ = [];
23053 loadQueue.forEach(function (fun) {
23054 return fun();
23055 });
23056 }
23057 /**
23058 * Determines whether the loader has enough info to load the next segment.
23059 *
23060 * @return {boolean}
23061 * Whether or not the loader has enough info to load the next segment
23062 */
23063 ;
23064
23065 _proto.hasEnoughInfoToLoad_ = function hasEnoughInfoToLoad_() {
23066 // Since primary timing goes by video, only the audio loader potentially needs to wait
23067 // to load.
23068 if (this.loaderType_ !== 'audio') {
23069 return true;
23070 }
23071
23072 var segmentInfo = this.pendingSegment_; // A fill buffer must have already run to establish a pending segment before there's
23073 // enough info to load.
23074
23075 if (!segmentInfo) {
23076 return false;
23077 } // The first segment can and should be loaded immediately so that source buffers are
23078 // created together (before appending). Source buffer creation uses the presence of
23079 // audio and video data to determine whether to create audio/video source buffers, and
23080 // uses processed (transmuxed or parsed) media to determine the types required.
23081
23082
23083 if (!this.getCurrentMediaInfo_()) {
23084 return true;
23085 }
23086
23087 if ( // Technically, instead of waiting to load a segment on timeline changes, a segment
23088 // can be requested and downloaded and only wait before it is transmuxed or parsed.
23089 // But in practice, there are a few reasons why it is better to wait until a loader
23090 // is ready to append that segment before requesting and downloading:
23091 //
23092 // 1. Because audio and main loaders cross discontinuities together, if this loader
23093 // is waiting for the other to catch up, then instead of requesting another
23094 // segment and using up more bandwidth, by not yet loading, more bandwidth is
23095 // allotted to the loader currently behind.
23096 // 2. media-segment-request doesn't have to have logic to consider whether a segment
23097 // is ready to be processed or not, isolating the queueing behavior to the loader.
23098 // 3. The audio loader bases some of its segment properties on timing information
23099 // provided by the main loader, meaning that, if the logic for waiting on
23100 // processing was in media-segment-request, then it would also need to know how
23101 // to re-generate the segment information after the main loader caught up.
23102 shouldWaitForTimelineChange({
23103 timelineChangeController: this.timelineChangeController_,
23104 currentTimeline: this.currentTimeline_,
23105 segmentTimeline: segmentInfo.timeline,
23106 loaderType: this.loaderType_,
23107 audioDisabled: this.audioDisabled_
23108 })) {
23109 return false;
23110 }
23111
23112 return true;
23113 };
23114
23115 _proto.getCurrentMediaInfo_ = function getCurrentMediaInfo_(segmentInfo) {
23116 if (segmentInfo === void 0) {
23117 segmentInfo = this.pendingSegment_;
23118 }
23119
23120 return segmentInfo && segmentInfo.trackInfo || this.currentMediaInfo_;
23121 };
23122
23123 _proto.getMediaInfo_ = function getMediaInfo_(segmentInfo) {
23124 if (segmentInfo === void 0) {
23125 segmentInfo = this.pendingSegment_;
23126 }
23127
23128 return this.getCurrentMediaInfo_(segmentInfo) || this.startingMediaInfo_;
23129 };
23130
23131 _proto.hasEnoughInfoToAppend_ = function hasEnoughInfoToAppend_() {
23132 if (!this.sourceUpdater_.ready()) {
23133 return false;
23134 } // If content needs to be removed or the loader is waiting on an append reattempt,
23135 // then no additional content should be appended until the prior append is resolved.
23136
23137
23138 if (this.waitingOnRemove_ || this.quotaExceededErrorRetryTimeout_) {
23139 return false;
23140 }
23141
23142 var segmentInfo = this.pendingSegment_;
23143 var trackInfo = this.getCurrentMediaInfo_(); // no segment to append any data for or
23144 // we do not have information on this specific
23145 // segment yet
23146
23147 if (!segmentInfo || !trackInfo) {
23148 return false;
23149 }
23150
23151 var hasAudio = trackInfo.hasAudio,
23152 hasVideo = trackInfo.hasVideo,
23153 isMuxed = trackInfo.isMuxed;
23154
23155 if (hasVideo && !segmentInfo.videoTimingInfo) {
23156 return false;
23157 } // muxed content only relies on video timing information for now.
23158
23159
23160 if (hasAudio && !this.audioDisabled_ && !isMuxed && !segmentInfo.audioTimingInfo) {
23161 return false;
23162 }
23163
23164 if (shouldWaitForTimelineChange({
23165 timelineChangeController: this.timelineChangeController_,
23166 currentTimeline: this.currentTimeline_,
23167 segmentTimeline: segmentInfo.timeline,
23168 loaderType: this.loaderType_,
23169 audioDisabled: this.audioDisabled_
23170 })) {
23171 return false;
23172 }
23173
23174 return true;
23175 };
23176
  // Handle transmuxed/parsed segment data: cache init segments and keys, fix
  // up timing info, update the timestamp offset, validate sync requests, and
  // finally hand the bytes off to appendData_.
  _proto.handleData_ = function handleData_(simpleSegment, result) {
    this.earlyAbortWhenNeeded_(simpleSegment.stats);

    if (this.checkForAbort_(simpleSegment.requestId)) {
      return;
    } // If there's anything in the call queue, then this data came later and should be
    // executed after the calls currently queued; also re-queue when not yet ready to
    // append.


    if (this.callQueue_.length || !this.hasEnoughInfoToAppend_()) {
      this.callQueue_.push(this.handleData_.bind(this, simpleSegment, result));
      return;
    }

    var segmentInfo = this.pendingSegment_; // Update the time mapping so we can translate from display time to media time.

    this.setTimeMapping_(segmentInfo.timeline); // For tracking overall stats.

    this.updateMediaSecondsLoaded_(segmentInfo.part || segmentInfo.segment); // Note that the state isn't changed from loading to appending. This is because abort
    // logic may change behavior depending on the state, and changing state too early may
    // inflate our estimates of bandwidth. In the future this should be re-examined to
    // note more granular states.
    // Don't process and append data if the mediaSource is closed.

    if (this.mediaSource_.readyState === 'closed') {
      return;
    } // If this request included an initialization segment, save that data
    // to the initSegment cache.


    if (simpleSegment.map) {
      simpleSegment.map = this.initSegmentForMap(simpleSegment.map, true); // Move over init segment properties to the media request.

      segmentInfo.segment.map = simpleSegment.map;
    } // If this request included a segment key, save that data in the cache.


    if (simpleSegment.key) {
      this.segmentKey(simpleSegment.key, true);
    }

    segmentInfo.isFmp4 = simpleSegment.isFmp4;
    segmentInfo.timingInfo = segmentInfo.timingInfo || {};

    if (segmentInfo.isFmp4) {
      this.trigger('fmp4');
      segmentInfo.timingInfo.start = segmentInfo[timingInfoPropertyForMedia(result.type)].start;
    } else {
      var trackInfo = this.getCurrentMediaInfo_();
      var useVideoTimingInfo = this.loaderType_ === 'main' && trackInfo && trackInfo.hasVideo;
      var firstVideoFrameTimeForData;

      if (useVideoTimingInfo) {
        firstVideoFrameTimeForData = segmentInfo.videoTimingInfo.start;
      } // The segment loader knows more about segment timing than the transmuxer (in
      // certain aspects), so make any changes required for a more accurate start time.
      // Don't set the end time yet, as the segment may not be finished processing.


      segmentInfo.timingInfo.start = this.trueSegmentStart_({
        currentStart: segmentInfo.timingInfo.start,
        playlist: segmentInfo.playlist,
        mediaIndex: segmentInfo.mediaIndex,
        currentVideoTimestampOffset: this.sourceUpdater_.videoTimestampOffset(),
        useVideoTimingInfo: useVideoTimingInfo,
        firstVideoFrameTimeForData: firstVideoFrameTimeForData,
        videoTimingInfo: segmentInfo.videoTimingInfo,
        audioTimingInfo: segmentInfo.audioTimingInfo
      });
    } // Init segments for audio and video only need to be appended in certain cases. Now
    // that data is about to be appended, we can check the final cases to determine
    // whether we should append an init segment.


    this.updateAppendInitSegmentStatus(segmentInfo, result.type); // The timestamp offset should be updated once we get new data and have its timing
    // info, as we use the start of the segment to offset the best guess (playlist
    // provided) timestamp offset.

    this.updateSourceBufferTimestampOffset_(segmentInfo); // If this is a sync request we need to determine whether it should
    // be appended or not.

    if (segmentInfo.isSyncRequest) {
      // First save/update our timing info for this segment.
      // This is what allows us to choose an accurate segment
      // and the main reason we make a sync request.
      this.updateTimingInfoEnd_(segmentInfo);
      this.syncController_.saveSegmentTimingInfo({
        segmentInfo: segmentInfo,
        shouldSaveTimelineMapping: this.loaderType_ === 'main'
      });
      var next = this.chooseNextRequest_(); // If the sync request isn't the segment that would be requested next
      // after taking into account its timing info, do not append it.

      if (next.mediaIndex !== segmentInfo.mediaIndex || next.partIndex !== segmentInfo.partIndex) {
        this.logger_('sync segment was incorrect, not appending');
        return;
      } // Otherwise append it like any other segment, as our guess was correct.


      this.logger_('sync segment was correct, appending');
    } // Save some state so that in the future anything waiting on first append (and/or
    // timestamp offset(s)) can process immediately. While the extra state isn't optimal,
    // we need some notion of whether the timestamp offset or other relevant information
    // has had a chance to be set.


    segmentInfo.hasAppendedData_ = true; // Now that the timestamp offset should be set, we can append any waiting ID3 tags.

    this.processMetadataQueue_();
    this.appendData_(segmentInfo, result);
  };
23288
23289 _proto.updateAppendInitSegmentStatus = function updateAppendInitSegmentStatus(segmentInfo, type) {
23290 // alt audio doesn't manage timestamp offset
23291 if (this.loaderType_ === 'main' && typeof segmentInfo.timestampOffset === 'number' && // in the case that we're handling partial data, we don't want to append an init
23292 // segment for each chunk
23293 !segmentInfo.changedTimestampOffset) {
23294 // if the timestamp offset changed, the timeline may have changed, so we have to re-
23295 // append init segments
23296 this.appendInitSegment_ = {
23297 audio: true,
23298 video: true
23299 };
23300 }
23301
23302 if (this.playlistOfLastInitSegment_[type] !== segmentInfo.playlist) {
23303 // make sure we append init segment on playlist changes, in case the media config
23304 // changed
23305 this.appendInitSegment_[type] = true;
23306 }
23307 };
23308
  // Resolve which init segment bytes (if any) need to be appended before the
  // next media data, updating the loader's init-segment bookkeeping along
  // the way. Returns the bytes to prepend, or null when no (re-)append is
  // needed.
  _proto.getInitSegmentAndUpdateState_ = function getInitSegmentAndUpdateState_(_ref4) {
    var type = _ref4.type,
        initSegment = _ref4.initSegment,
        map = _ref4.map,
        playlist = _ref4.playlist;

    // "The EXT-X-MAP tag specifies how to obtain the Media Initialization Section
    // (Section 3) required to parse the applicable Media Segments. It applies to every
    // Media Segment that appears after it in the Playlist until the next EXT-X-MAP tag
    // or until the end of the playlist."
    // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.2.5
    if (map) {
      var id = initSegmentId(map);

      if (this.activeInitSegmentId_ === id) {
        // Don't need to re-append the init segment if the ID matches.
        return null;
      } // A map-specified init segment takes priority over any transmuxed (or otherwise
      // obtained) init segment.
      //
      // This also caches the init segment for later use.


      initSegment = this.initSegmentForMap(map, true).bytes;
      this.activeInitSegmentId_ = id;
    } // We used to always prepend init segments for video; however, that shouldn't be
    // necessary. Instead, we should only append on changes, similar to what we've always
    // done for audio. This is more important (though may not be that important) for
    // frame-by-frame appending for LHLS, simply because of the increased quantity of
    // appends.


    if (initSegment && this.appendInitSegment_[type]) {
      // Make sure we track the playlist that we last used for the init segment, so that
      // we can re-append the init segment in the event that we get data from a new
      // playlist. Discontinuities and track changes are handled in other sections.
      this.playlistOfLastInitSegment_[type] = playlist; // Disable future init segment appends for this type, until a change is necessary.

      this.appendInitSegment_[type] = false; // We need to clear out the fmp4 active init segment id, since
      // we are appending the muxer init segment instead.

      this.activeInitSegmentId_ = null;
      return initSegment;
    }

    return null;
  };
23356
  // Recover from a QuotaExceededError on append: if there is back buffer to
  // reclaim, remove it and schedule a retry of the same append; otherwise
  // exclude this playlist, since a single segment is too large to append.
  _proto.handleQuotaExceededError_ = function handleQuotaExceededError_(_ref5, error) {
    var _this3 = this;

    var segmentInfo = _ref5.segmentInfo,
        type = _ref5.type,
        bytes = _ref5.bytes;
    var audioBuffered = this.sourceUpdater_.audioBuffered();
    var videoBuffered = this.sourceUpdater_.videoBuffered(); // For now we're ignoring any notion of gaps in the buffer, but they, in theory,
    // should be cleared out during the buffer removals. However, log in case it helps
    // debug.

    if (audioBuffered.length > 1) {
      this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the audio buffer: ' + timeRangesToArray(audioBuffered).join(', '));
    }

    if (videoBuffered.length > 1) {
      this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the video buffer: ' + timeRangesToArray(videoBuffered).join(', '));
    }

    // Overall extent of each buffer (first start to last end), treating an
    // empty buffer as a zero-length span.
    var audioBufferStart = audioBuffered.length ? audioBuffered.start(0) : 0;
    var audioBufferEnd = audioBuffered.length ? audioBuffered.end(audioBuffered.length - 1) : 0;
    var videoBufferStart = videoBuffered.length ? videoBuffered.start(0) : 0;
    var videoBufferEnd = videoBuffered.length ? videoBuffered.end(videoBuffered.length - 1) : 0;

    if (audioBufferEnd - audioBufferStart <= MIN_BACK_BUFFER && videoBufferEnd - videoBufferStart <= MIN_BACK_BUFFER) {
      // Can't remove enough buffer to make room for the new segment (or the browser
      // doesn't allow for appends of segments this size). In the future, it may be
      // possible to split up the segment and append in pieces, but for now, error out
      // this playlist in an attempt to switch to a more manageable rendition.
      this.logger_('On QUOTA_EXCEEDED_ERR, single segment too large to append to ' + 'buffer, triggering an error. ' + ("Appended byte length: " + bytes.byteLength + ", ") + ("audio buffer: " + timeRangesToArray(audioBuffered).join(', ') + ", ") + ("video buffer: " + timeRangesToArray(videoBuffered).join(', ') + ", "));
      this.error({
        message: 'Quota exceeded error with append of a single segment of content',
        excludeUntil: Infinity
      });
      this.trigger('error');
      return;
    } // To try to resolve the quota exceeded error, clear back buffer and retry. This means
    // that the segment-loader should block on future events until this one is handled, so
    // that it doesn't keep moving onto further segments. Adding the call to the call
    // queue will prevent further appends until waitingOnRemove_ and
    // quotaExceededErrorRetryTimeout_ are cleared.
    //
    // Note that this will only block the current loader. In the case of demuxed content,
    // the other loader may keep filling as fast as possible. In practice, this should be
    // OK, as it is a rare case when either audio has a high enough bitrate to fill up a
    // source buffer, or video fills without enough room for audio to append (and without
    // the availability of clearing out seconds of back buffer to make room for audio).
    // But it might still be good to handle this case in the future as a TODO.


    this.waitingOnRemove_ = true;
    this.callQueue_.push(this.appendToSourceBuffer_.bind(this, {
      segmentInfo: segmentInfo,
      type: type,
      bytes: bytes
    }));
    var currentTime = this.currentTime_(); // Try to remove as much audio and video as possible to make room for new content
    // before retrying.

    var timeToRemoveUntil = currentTime - MIN_BACK_BUFFER;
    this.logger_("On QUOTA_EXCEEDED_ERR, removing audio/video from 0 to " + timeToRemoveUntil);
    this.remove(0, timeToRemoveUntil, function () {
      _this3.logger_("On QUOTA_EXCEEDED_ERR, retrying append in " + MIN_BACK_BUFFER + "s");

      _this3.waitingOnRemove_ = false; // Wait the length of time allotted in the back buffer to prevent wasted
      // attempts (since we can't clear less than the minimum).

      _this3.quotaExceededErrorRetryTimeout_ = window.setTimeout(function () {
        _this3.logger_('On QUOTA_EXCEEDED_ERR, re-processing call queue');

        _this3.quotaExceededErrorRetryTimeout_ = null;

        _this3.processCallQueue_();
      }, MIN_BACK_BUFFER * 1000);
    }, true);
  };
23433
23434 _proto.handleAppendError_ = function handleAppendError_(_ref6, error) {
23435 var segmentInfo = _ref6.segmentInfo,
23436 type = _ref6.type,
23437 bytes = _ref6.bytes;
23438
23439 // if there's no error, nothing to do
23440 if (!error) {
23441 return;
23442 }
23443
23444 if (error.code === QUOTA_EXCEEDED_ERR) {
23445 this.handleQuotaExceededError_({
23446 segmentInfo: segmentInfo,
23447 type: type,
23448 bytes: bytes
23449 }); // A quota exceeded error should be recoverable with a future re-append, so no need
23450 // to trigger an append error.
23451
23452 return;
23453 }
23454
23455 this.logger_('Received non QUOTA_EXCEEDED_ERR on append', error);
23456 this.error(type + " append of " + bytes.length + "b failed for segment " + ("#" + segmentInfo.mediaIndex + " in playlist " + segmentInfo.playlist.id)); // If an append errors, we often can't recover.
23457 // (see https://w3c.github.io/media-source/#sourcebuffer-append-error).
23458 //
23459 // Trigger a special error so that it can be handled separately from normal,
23460 // recoverable errors.
23461
23462 this.trigger('appenderror');
23463 };
23464
_proto.appendToSourceBuffer_ = function appendToSourceBuffer_(_ref7) {
  var segmentInfo = _ref7.segmentInfo,
      type = _ref7.type,
      initSegment = _ref7.initSegment,
      data = _ref7.data,
      bytes = _ref7.bytes;

  // If this is a re-append, bytes were already created and don't need to be recreated
  if (!bytes) {
    var segments = [data];
    var byteLength = data.byteLength;

    if (initSegment) {
      // if the media initialization segment is changing, append it before the content
      // segment
      segments.unshift(initSegment);
      byteLength += initSegment.byteLength;
    } // Technically we should be OK appending the init segment separately, however, we
    // haven't yet tested that, and prepending is how we have always done things.


    // concatSegments is defined elsewhere in this file; presumably it merges
    // the (init +) media segment bytes into one contiguous buffer of
    // `byteLength` total bytes — confirm at its definition.
    bytes = concatSegments({
      bytes: byteLength,
      segments: segments
    });
  }

  // Queue the append on the source updater. Any append error (including
  // QUOTA_EXCEEDED_ERR) is routed to handleAppendError_ along with enough
  // context (segmentInfo/type/bytes) to attempt a later re-append.
  this.sourceUpdater_.appendBuffer({
    segmentInfo: segmentInfo,
    type: type,
    bytes: bytes
  }, this.handleAppendError_.bind(this, {
    segmentInfo: segmentInfo,
    type: type,
    bytes: bytes
  }));
};
23502
_proto.handleSegmentTimingInfo_ = function handleSegmentTimingInfo_(type, requestId, segmentTimingInfo) {
  // Ignore stale callbacks from requests that are no longer pending.
  var pending = this.pendingSegment_;

  if (!pending || requestId !== pending.requestId) {
    return;
  }

  var timingInfoProperty = type + "TimingInfo";
  var segment = pending.segment;

  if (!segment[timingInfoProperty]) {
    segment[timingInfoProperty] = {};
  }

  var info = segment[timingInfoProperty];

  info.transmuxerPrependedSeconds = segmentTimingInfo.prependedContentDuration || 0;
  info.transmuxedPresentationStart = segmentTimingInfo.start.presentation;
  info.transmuxedDecodeStart = segmentTimingInfo.start.decode;
  info.transmuxedPresentationEnd = segmentTimingInfo.end.presentation;
  info.transmuxedDecodeEnd = segmentTimingInfo.end.decode;
  // mainly used as a reference for debugging
  info.baseMediaDecodeTime = segmentTimingInfo.baseMediaDecodeTime;
};
23523
_proto.appendData_ = function appendData_(segmentInfo, result) {
  var type = result.type;
  var data = result.data;

  // Nothing to append when the transmuxer produced no bytes.
  if (!data || !data.byteLength) {
    return;
  }

  // Skip audio appends entirely while the audio loader is disabled.
  if (type === 'audio' && this.audioDisabled_) {
    return;
  }

  var initSegment = this.getInitSegmentAndUpdateState_({
    type: type,
    initSegment: result.initSegment,
    playlist: segmentInfo.playlist,
    map: segmentInfo.isFmp4 ? segmentInfo.segment.map : null
  });

  this.appendToSourceBuffer_({
    segmentInfo: segmentInfo,
    type: type,
    initSegment: initSegment,
    data: data
  });
}
/**
 * load a specific segment from a request into the buffer
 *
 * @private
 */
;
23555
_proto.loadSegment_ = function loadSegment_(segmentInfo) {
  var _this4 = this;

  this.state = 'WAITING';
  this.pendingSegment_ = segmentInfo;
  this.trimBackBuffer_(segmentInfo);

  // A numeric timestampOffset is set on this segmentInfo; clear any cached
  // MP4 captions in the transmuxer before the new segment is processed.
  // NOTE(review): presumably a numeric offset indicates a timeline change —
  // confirm against generateSegmentInfo_.
  if (typeof segmentInfo.timestampOffset === 'number') {
    if (this.transmuxer_) {
      this.transmuxer_.postMessage({
        action: 'clearAllMp4Captions'
      });
    }
  }

  // If not enough info is available yet, defer the request; the queued
  // closure re-derives segment info when it eventually runs.
  if (!this.hasEnoughInfoToLoad_()) {
    this.loadQueue_.push(function () {
      // regenerate the audioAppendStart, timestampOffset, etc as they
      // may have changed since this function was added to the queue.
      var options = _extends_1({}, segmentInfo, {
        forceTimestampOffset: true
      });

      _extends_1(segmentInfo, _this4.generateSegmentInfo_(options));

      _this4.isPendingTimestampOffset_ = false;

      _this4.updateTransmuxerAndRequestSegment_(segmentInfo);
    });
    return;
  }

  this.updateTransmuxerAndRequestSegment_(segmentInfo);
};
23590
_proto.updateTransmuxerAndRequestSegment_ = function updateTransmuxerAndRequestSegment_(segmentInfo) {
  var _this5 = this;

  // We'll update the source buffer's timestamp offset once we have transmuxed data, but
  // the transmuxer still needs to be updated before then.
  //
  // Even though keepOriginalTimestamps is set to true for the transmuxer, timestamp
  // offset must be passed to the transmuxer for stream correcting adjustments.
  if (this.shouldUpdateTransmuxerTimestampOffset_(segmentInfo.timestampOffset)) {
    this.gopBuffer_.length = 0; // gopsToAlignWith was set before the GOP buffer was cleared

    segmentInfo.gopsToAlignWith = [];
    this.timeMapping_ = 0; // reset values in the transmuxer since a discontinuity should start fresh

    this.transmuxer_.postMessage({
      action: 'reset'
    });
    this.transmuxer_.postMessage({
      action: 'setTimestampOffset',
      timestampOffset: segmentInfo.timestampOffset
    });
  }

  var simpleSegment = this.createSimplifiedSegmentObj_(segmentInfo);
  var isEndOfStream = this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex);
  var isWalkingForward = this.mediaIndex !== null;
  var isDiscontinuity = segmentInfo.timeline !== this.currentTimeline_ && // currentTimeline starts at -1, so we shouldn't end the timeline switching to 0,
  // the first timeline
  segmentInfo.timeline > 0;
  var isEndOfTimeline = isEndOfStream || isWalkingForward && isDiscontinuity;
  this.logger_("Requesting " + segmentInfoString(segmentInfo)); // If there's an init segment associated with this segment, but it is not cached (identified by a lack of bytes),
  // then this init segment has never been seen before and should be appended.
  //
  // At this point the content type (audio/video or both) is not yet known, but it should be safe to set
  // both to true and leave the decision of whether to append the init segment to append time.

  if (simpleSegment.map && !simpleSegment.map.bytes) {
    this.logger_('going to request init segment.');
    this.appendInitSegment_ = {
      video: true,
      audio: true
    };
  }

  // Kick off the actual request. mediaSegmentRequest returns an abort
  // function which is retained so the in-flight request can be canceled.
  segmentInfo.abortRequests = mediaSegmentRequest({
    xhr: this.vhs_.xhr,
    xhrOptions: this.xhrOptions_,
    decryptionWorker: this.decrypter_,
    segment: simpleSegment,
    abortFn: this.handleAbort_.bind(this, segmentInfo),
    progressFn: this.handleProgress_.bind(this),
    trackInfoFn: this.handleTrackInfo_.bind(this),
    timingInfoFn: this.handleTimingInfo_.bind(this),
    videoSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'video', segmentInfo.requestId),
    audioSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'audio', segmentInfo.requestId),
    captionsFn: this.handleCaptions_.bind(this),
    isEndOfTimeline: isEndOfTimeline,
    endedTimelineFn: function endedTimelineFn() {
      _this5.logger_('received endedtimeline callback');
    },
    id3Fn: this.handleId3_.bind(this),
    dataFn: this.handleData_.bind(this),
    doneFn: this.segmentRequestFinished_.bind(this),
    onTransmuxerLog: function onTransmuxerLog(_ref8) {
      var message = _ref8.message,
          level = _ref8.level,
          stream = _ref8.stream;

      _this5.logger_(segmentInfoString(segmentInfo) + " logged from transmuxer stream " + stream + " as a " + level + ": " + message);
    }
  });
}
/**
 * trim the back buffer so that we don't have too much data
 * in the source buffer
 *
 * @private
 *
 * @param {Object} segmentInfo - the current segment
 */
;
23672
_proto.trimBackBuffer_ = function trimBackBuffer_(segmentInfo) {
  // Chrome has a hard limit of 150MB of buffer and a very conservative
  // "garbage collector". We manually clear out the old buffer to ensure we
  // don't trigger the QuotaExceeded error on the source buffer during
  // subsequent appends.
  var trimToTime = safeBackBufferTrimTime(this.seekable_(), this.currentTime_(), this.playlist_.targetDuration || 10);

  if (trimToTime <= 0) {
    return;
  }

  this.remove(0, trimToTime);
}
/**
 * created a simplified copy of the segment object with just the
 * information necessary to perform the XHR and decryption
 *
 * @private
 *
 * @param {Object} segmentInfo - the current segment
 * @return {Object} a simplified segment object copy
 */
;
23694
_proto.createSimplifiedSegmentObj_ = function createSimplifiedSegmentObj_(segmentInfo) {
  var segment = segmentInfo.segment;
  var part = segmentInfo.part;
  // Only the fields needed for the XHR and decryption are copied. When a
  // part (low-latency partial segment) is present, its URI/byterange take
  // precedence over the full segment's.
  var simpleSegment = {
    resolvedUri: part ? part.resolvedUri : segment.resolvedUri,
    byterange: part ? part.byterange : segment.byterange,
    requestId: segmentInfo.requestId,
    transmuxer: segmentInfo.transmuxer,
    audioAppendStart: segmentInfo.audioAppendStart,
    gopsToAlignWith: segmentInfo.gopsToAlignWith,
    part: segmentInfo.part
  };
  var previousSegment = segmentInfo.playlist.segments[segmentInfo.mediaIndex - 1];

  if (previousSegment && previousSegment.timeline === segment.timeline) {
    // The baseStartTime of a segment is used to handle rollover when probing the TS
    // segment to retrieve timing information. Since the probe only looks at the media's
    // times (e.g., PTS and DTS values of the segment), and doesn't consider the
    // player's time (e.g., player.currentTime()), baseStartTime should reflect the
    // media time as well. transmuxedDecodeEnd represents the end time of a segment, in
    // seconds of media time, so should be used here. The previous segment is used since
    // the end of the previous segment should represent the beginning of the current
    // segment, so long as they are on the same timeline.
    if (previousSegment.videoTimingInfo) {
      simpleSegment.baseStartTime = previousSegment.videoTimingInfo.transmuxedDecodeEnd;
    } else if (previousSegment.audioTimingInfo) {
      simpleSegment.baseStartTime = previousSegment.audioTimingInfo.transmuxedDecodeEnd;
    }
  }

  if (segment.key) {
    // if the media sequence is greater than 2^32, the IV will be incorrect
    // assuming 10s segments, that would be about 1300 years
    var iv = segment.key.iv || new Uint32Array([0, 0, 0, segmentInfo.mediaIndex + segmentInfo.playlist.mediaSequence]);
    simpleSegment.key = this.segmentKey(segment.key);
    simpleSegment.key.iv = iv;
  }

  if (segment.map) {
    simpleSegment.map = this.initSegmentForMap(segment.map);
  }

  return simpleSegment;
};
23739
_proto.saveTransferStats_ = function saveTransferStats_(stats) {
  // Every request counts as a media request, even one that was aborted or
  // canceled due to a timeout.
  this.mediaRequests += 1;

  if (!stats) {
    return;
  }

  this.mediaBytesTransferred += stats.bytesReceived;
  this.mediaTransferDuration += stats.roundTripTime;
};
23750
_proto.saveBandwidthRelatedStats_ = function saveBandwidthRelatedStats_(duration, stats) {
  // byteLength will be used for throughput, and should be based on bytes
  // received, which we only know at the end of the request and should reflect
  // total bytes downloaded rather than just bytes processed from components
  // of the segment.
  this.pendingSegment_.byteLength = stats.bytesReceived;

  // Very short segments produce unreliable bandwidth samples; skip them.
  if (duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
    this.logger_("Ignoring segment's bandwidth because its duration of " + duration + (" is less than the min to record " + MIN_SEGMENT_DURATION_TO_SAVE_STATS));
    return;
  }

  this.bandwidth = stats.bandwidth;
  this.roundTrip = stats.roundTripTime;
};
23765
_proto.handleTimeout_ = function handleTimeout_() {
  // Although the VTT segment loader bandwidth isn't really used, it's good to
  // maintain functionality between segment loaders.
  this.mediaRequestsTimedout += 1;
  // Force the lowest possible bandwidth estimate so ABR can take emergency
  // action on the next rendition decision.
  this.roundTrip = NaN;
  this.bandwidth = 1;
  this.trigger('bandwidthupdate');
}
/**
 * Handle the callback from the segmentRequest function and set the
 * associated SegmentLoader state and errors if necessary
 *
 * @private
 */
;
23781
_proto.segmentRequestFinished_ = function segmentRequestFinished_(error, simpleSegment, result) {
  // TODO handle special cases, e.g., muxed audio/video but only audio in the segment
  // check the call queue directly since this function doesn't need to deal with any
  // data, and can continue even if the source buffers are not set up and we didn't get
  // any data from the segment
  if (this.callQueue_.length) {
    this.callQueue_.push(this.segmentRequestFinished_.bind(this, error, simpleSegment, result));
    return;
  }

  this.saveTransferStats_(simpleSegment.stats); // The request was aborted and the SegmentLoader has already been reset

  if (!this.pendingSegment_) {
    return;
  } // the request was aborted and the SegmentLoader has already started
  // another request. this can happen when the timeout for an aborted
  // request triggers due to a limitation in the XHR library
  // do not count this as any sort of request or we risk double-counting


  if (simpleSegment.requestId !== this.pendingSegment_.requestId) {
    return;
  } // an error occurred from the active pendingSegment_ so reset everything


  if (error) {
    this.pendingSegment_ = null;
    this.state = 'READY'; // aborts are not a true error condition and nothing corrective needs to be done

    if (error.code === REQUEST_ERRORS.ABORTED) {
      return;
    }

    this.pause(); // the error is really just that at least one of the requests timed-out
    // set the bandwidth to a very low value and trigger an ABR switch to
    // take emergency action

    if (error.code === REQUEST_ERRORS.TIMEOUT) {
      this.handleTimeout_();
      return;
    } // if control-flow has arrived here, then the error is real
    // emit an error event to blacklist the current playlist


    this.mediaRequestsErrored += 1;
    this.error(error);
    this.trigger('error');
    return;
  }

  var segmentInfo = this.pendingSegment_; // the response was a success so set any bandwidth stats the request
  // generated for ABR purposes

  this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats);
  segmentInfo.endOfAllRequests = simpleSegment.endOfAllRequests;

  if (result.gopInfo) {
    this.gopBuffer_ = updateGopBuffer(this.gopBuffer_, result.gopInfo, this.safeAppend_);
  } // Although we may have already started appending on progress, we shouldn't switch the
  // state away from loading until we are officially done loading the segment data.


  this.state = 'APPENDING'; // used for testing

  this.trigger('appending');
  // Appends may still be in flight on the source buffers; defer the rest of
  // segment completion until they have all reported done.
  this.waitForAppendsToComplete_(segmentInfo);
};
23849
_proto.setTimeMapping_ = function setTimeMapping_(timeline) {
  // Only overwrite the current mapping when the sync controller actually has
  // one recorded for this timeline.
  var mapping = this.syncController_.mappingForTimeline(timeline);

  if (mapping !== null) {
    this.timeMapping_ = mapping;
  }
};
23857
_proto.updateMediaSecondsLoaded_ = function updateMediaSecondsLoaded_(segment) {
  // Prefer the measured start/end when both are known; otherwise fall back to
  // the playlist-declared duration.
  var hasMeasuredTimes = typeof segment.start === 'number' && typeof segment.end === 'number';

  this.mediaSecondsLoaded += hasMeasuredTimes ? segment.end - segment.start : segment.duration;
};
23865
_proto.shouldUpdateTransmuxerTimestampOffset_ = function shouldUpdateTransmuxerTimestampOffset_(timestampOffset) {
  if (timestampOffset === null) {
    return false;
  }

  // note that we're potentially using the same timestamp offset for both
  // video and audio

  // Main loader: an update is needed whenever the video offset disagrees.
  if (this.loaderType_ === 'main' && timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
    return true;
  }

  // Otherwise an update is needed only if audio is enabled and its offset
  // disagrees.
  return !this.audioDisabled_ && timestampOffset !== this.sourceUpdater_.audioTimestampOffset();
};
23883
_proto.trueSegmentStart_ = function trueSegmentStart_(_ref9) {
  var currentStart = _ref9.currentStart;
  var playlist = _ref9.playlist;
  var mediaIndex = _ref9.mediaIndex;
  var firstVideoFrameTimeForData = _ref9.firstVideoFrameTimeForData;
  var currentVideoTimestampOffset = _ref9.currentVideoTimestampOffset;
  var useVideoTimingInfo = _ref9.useVideoTimingInfo;
  var videoTimingInfo = _ref9.videoTimingInfo;
  var audioTimingInfo = _ref9.audioTimingInfo;

  // if start was set once, keep using it
  if (typeof currentStart !== 'undefined') {
    return currentStart;
  }

  if (!useVideoTimingInfo) {
    return audioTimingInfo.start;
  }

  // The start of a segment should be the start of the first full frame contained
  // within that segment. Since the transmuxer maintains a cache of incomplete data
  // from and/or the last frame seen, the start time may reflect a frame that starts
  // in the previous segment. Check for that case and ensure the start time is
  // accurate for the segment.
  var previousSegment = playlist.segments[mediaIndex - 1];
  var continuesPrevious = mediaIndex !== 0 &&
    previousSegment &&
    typeof previousSegment.start !== 'undefined' &&
    previousSegment.end === firstVideoFrameTimeForData + currentVideoTimestampOffset;

  return continuesPrevious ? videoTimingInfo.start : firstVideoFrameTimeForData;
};
23915
_proto.waitForAppendsToComplete_ = function waitForAppendsToComplete_(segmentInfo) {
  var trackInfo = this.getCurrentMediaInfo_(segmentInfo);

  if (!trackInfo) {
    this.error({
      message: 'No starting media returned, likely due to an unsupported media format.',
      blacklistDuration: Infinity
    });
    this.trigger('error');
    return;
  } // Although transmuxing is done, appends may not yet be finished. Throw a marker
  // on each queue this loader is responsible for to ensure that the appends are
  // complete.


  var hasAudio = trackInfo.hasAudio,
      hasVideo = trackInfo.hasVideo,
      isMuxed = trackInfo.isMuxed;
  // The main loader owns video appends; the audio loader only waits on audio,
  // and muxed content is appended in a single (video queue) operation.
  var waitForVideo = this.loaderType_ === 'main' && hasVideo;
  var waitForAudio = !this.audioDisabled_ && hasAudio && !isMuxed;
  segmentInfo.waitingOnAppends = 0; // segments with no data

  if (!segmentInfo.hasAppendedData_) {
    if (!segmentInfo.timingInfo && typeof segmentInfo.timestampOffset === 'number') {
      // When there's no audio or video data in the segment, there's no audio or video
      // timing information.
      //
      // If there's no audio or video timing information, then the timestamp offset
      // can't be adjusted to the appropriate value for the transmuxer and source
      // buffers.
      //
      // Therefore, the next segment should be used to set the timestamp offset.
      this.isPendingTimestampOffset_ = true;
    } // override settings for metadata only segments


    segmentInfo.timingInfo = {
      start: 0
    };
    segmentInfo.waitingOnAppends++;

    if (!this.isPendingTimestampOffset_) {
      // update the timestampoffset
      this.updateSourceBufferTimestampOffset_(segmentInfo); // make sure the metadata queue is processed even though we have
      // no video/audio data.

      this.processMetadataQueue_();
    } // append is "done" instantly with no data.


    this.checkAppendsDone_(segmentInfo);
    return;
  } // Since source updater could call back synchronously, do the increments first.


  if (waitForVideo) {
    segmentInfo.waitingOnAppends++;
  }

  if (waitForAudio) {
    segmentInfo.waitingOnAppends++;
  }

  if (waitForVideo) {
    this.sourceUpdater_.videoQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
  }

  if (waitForAudio) {
    this.sourceUpdater_.audioQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
  }
};
23987
_proto.checkAppendsDone_ = function checkAppendsDone_(segmentInfo) {
  // Bail out if the loader was reset/aborted after this append was queued.
  if (this.checkForAbort_(segmentInfo.requestId)) {
    return;
  }

  segmentInfo.waitingOnAppends -= 1;

  // Once every outstanding append has reported in, finish the segment.
  if (segmentInfo.waitingOnAppends === 0) {
    this.handleAppendsDone_();
  }
};
23999
_proto.checkForIllegalMediaSwitch = function checkForIllegalMediaSwitch(trackInfo) {
  var illegalSwitchMessage = illegalMediaSwitch(this.loaderType_, this.getCurrentMediaInfo_(), trackInfo);

  if (!illegalSwitchMessage) {
    return false;
  }

  // An illegal media switch is unrecoverable for this playlist, so blacklist
  // it permanently and surface the error.
  this.error({
    message: illegalSwitchMessage,
    blacklistDuration: Infinity
  });
  this.trigger('error');
  return true;
};
24014
_proto.updateSourceBufferTimestampOffset_ = function updateSourceBufferTimestampOffset_(segmentInfo) {
  if (segmentInfo.timestampOffset === null || // we don't yet have the start for whatever media type (video or audio) has
  // priority, timing-wise, so we must wait
  typeof segmentInfo.timingInfo.start !== 'number' || // already updated the timestamp offset for this segment
  segmentInfo.changedTimestampOffset || // the alt audio loader should not be responsible for setting the timestamp offset
  this.loaderType_ !== 'main') {
    return;
  }

  var didChange = false; // Primary timing goes by video, and audio is trimmed in the transmuxer, meaning that
  // the timing info here comes from video. In the event that the audio is longer than
  // the video, this will trim the start of the audio.
  // This also trims any offset from 0 at the beginning of the media

  segmentInfo.timestampOffset -= this.getSegmentStartTimeForTimestampOffsetCalculation_({
    videoTimingInfo: segmentInfo.segment.videoTimingInfo,
    audioTimingInfo: segmentInfo.segment.audioTimingInfo,
    timingInfo: segmentInfo.timingInfo
  }); // In the event that there are part segment downloads, each will try to update the
  // timestamp offset. Retaining this bit of state prevents us from updating in the
  // future (within the same segment), however, there may be a better way to handle it.

  segmentInfo.changedTimestampOffset = true;

  // Push the (possibly adjusted) offset to both source buffers; only fire the
  // event when at least one of them actually changed.
  if (segmentInfo.timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
    this.sourceUpdater_.videoTimestampOffset(segmentInfo.timestampOffset);
    didChange = true;
  }

  if (segmentInfo.timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {
    this.sourceUpdater_.audioTimestampOffset(segmentInfo.timestampOffset);
    didChange = true;
  }

  if (didChange) {
    this.trigger('timestampoffset');
  }
};
24053
_proto.getSegmentStartTimeForTimestampOffsetCalculation_ = function getSegmentStartTimeForTimestampOffsetCalculation_(_ref10) {
  var videoTimingInfo = _ref10.videoTimingInfo;
  var audioTimingInfo = _ref10.audioTimingInfo;
  var timingInfo = _ref10.timingInfo;

  if (!this.useDtsForTimestampOffset_) {
    return timingInfo.start;
  }

  // Prefer the video decode start, then the audio decode start (audio-only
  // content), and finally fall back to the presentation start for content
  // that wasn't transmuxed (e.g., MP4).
  var hasVideoDts = videoTimingInfo && typeof videoTimingInfo.transmuxedDecodeStart === 'number';

  if (hasVideoDts) {
    return videoTimingInfo.transmuxedDecodeStart;
  }

  var hasAudioDts = audioTimingInfo && typeof audioTimingInfo.transmuxedDecodeStart === 'number';

  if (hasAudioDts) {
    return audioTimingInfo.transmuxedDecodeStart;
  }

  return timingInfo.start;
};
24075
_proto.updateTimingInfoEnd_ = function updateTimingInfoEnd_(segmentInfo) {
  segmentInfo.timingInfo = segmentInfo.timingInfo || {};

  var trackInfo = this.getMediaInfo_();
  var useVideoTimingInfo = this.loaderType_ === 'main' && trackInfo && trackInfo.hasVideo;
  var prioritizedTimingInfo = useVideoTimingInfo && segmentInfo.videoTimingInfo ? segmentInfo.videoTimingInfo : segmentInfo.audioTimingInfo;

  if (!prioritizedTimingInfo) {
    return;
  }

  if (typeof prioritizedTimingInfo.end === 'number') {
    segmentInfo.timingInfo.end = prioritizedTimingInfo.end;
  } else {
    // End time may not exist in a case where we aren't parsing the full
    // segment (one current example is the case of fmp4), so use the rough
    // duration to calculate an end time.
    segmentInfo.timingInfo.end = prioritizedTimingInfo.start + segmentInfo.duration;
  }
}
/**
 * callback to run when appendBuffer is finished. detects if we are
 * in a good state to do things with the data we got, or if we need
 * to wait for more
 *
 * @private
 */
;
24099
_proto.handleAppendsDone_ = function handleAppendsDone_() {
  // appendsdone can cause an abort
  if (this.pendingSegment_) {
    this.trigger('appendsdone');
  }

  // Re-check: the 'appendsdone' handlers above may have aborted and cleared
  // the pending segment.
  if (!this.pendingSegment_) {
    this.state = 'READY'; // TODO should this move into this.checkForAbort to speed up requests post abort in
    // all appending cases?

    if (!this.paused()) {
      this.monitorBuffer_();
    }

    return;
  }

  var segmentInfo = this.pendingSegment_; // Now that the end of the segment has been reached, we can set the end time. It's
  // best to wait until all appends are done so we're sure that the primary media is
  // finished (and we have its end time).

  this.updateTimingInfoEnd_(segmentInfo);

  if (this.shouldSaveSegmentTimingInfo_) {
    // Timeline mappings should only be saved for the main loader. This is for multiple
    // reasons:
    //
    // 1) Only one mapping is saved per timeline, meaning that if both the audio loader
    //    and the main loader try to save the timeline mapping, whichever comes later
    //    will overwrite the first. In theory this is OK, as the mappings should be the
    //    same, however, it breaks for (2)
    // 2) In the event of a live stream, the initial live point will make for a somewhat
    //    arbitrary mapping. If audio and video streams are not perfectly in-sync, then
    //    the mapping will be off for one of the streams, dependent on which one was
    //    first saved (see (1)).
    // 3) Primary timing goes by video in VHS, so the mapping should be video.
    //
    // Since the audio loader will wait for the main loader to load the first segment,
    // the main loader will save the first timeline mapping, and ensure that there won't
    // be a case where audio loads two segments without saving a mapping (thus leading
    // to missing segment timing info).
    this.syncController_.saveSegmentTimingInfo({
      segmentInfo: segmentInfo,
      shouldSaveTimelineMapping: this.loaderType_ === 'main'
    });
  }

  // Warn (or just log) when the downloaded segment's duration deviates
  // noticeably from what the playlist declared.
  var segmentDurationMessage = getTroublesomeSegmentDurationMessage(segmentInfo, this.sourceType_);

  if (segmentDurationMessage) {
    if (segmentDurationMessage.severity === 'warn') {
      videojs__default["default"].log.warn(segmentDurationMessage.message);
    } else {
      this.logger_(segmentDurationMessage.message);
    }
  }

  this.recordThroughput_(segmentInfo);
  this.pendingSegment_ = null;
  this.state = 'READY';

  if (segmentInfo.isSyncRequest) {
    this.trigger('syncinfoupdate'); // if the sync request was not appended
    // then it was not the correct segment.
    // throw it away and use the data it gave us
    // to get the correct one.

    if (!segmentInfo.hasAppendedData_) {
      this.logger_("Throwing away un-appended sync request " + segmentInfoString(segmentInfo));
      return;
    }
  }

  this.logger_("Appended " + segmentInfoString(segmentInfo));
  this.addSegmentMetadataCue_(segmentInfo);
  this.fetchAtBuffer_ = true;

  if (this.currentTimeline_ !== segmentInfo.timeline) {
    this.timelineChangeController_.lastTimelineChange({
      type: this.loaderType_,
      from: this.currentTimeline_,
      to: segmentInfo.timeline
    }); // If audio is not disabled, the main segment loader is responsible for updating
    // the audio timeline as well. If the content is video only, this won't have any
    // impact.

    if (this.loaderType_ === 'main' && !this.audioDisabled_) {
      this.timelineChangeController_.lastTimelineChange({
        type: 'audio',
        from: this.currentTimeline_,
        to: segmentInfo.timeline
      });
    }
  }

  this.currentTimeline_ = segmentInfo.timeline; // We must update the syncinfo to recalculate the seekable range before
  // the following conditional otherwise it may consider this a bad "guess"
  // and attempt to resync when the post-update seekable window and live
  // point would mean that this was the perfect segment to fetch

  this.trigger('syncinfoupdate');
  var segment = segmentInfo.segment;
  var part = segmentInfo.part;
  var badSegmentGuess = segment.end && this.currentTime_() - segment.end > segmentInfo.playlist.targetDuration * 3;
  var badPartGuess = part && part.end && this.currentTime_() - part.end > segmentInfo.playlist.partTargetDuration * 3; // If we previously appended a segment/part that ends more than 3 part/targetDurations before
  // the currentTime_ that means that our conservative guess was too conservative.
  // In that case, reset the loader state so that we try to use any information gained
  // from the previous request to create a new, more accurate, sync-point.

  if (badSegmentGuess || badPartGuess) {
    this.logger_("bad " + (badSegmentGuess ? 'segment' : 'part') + " " + segmentInfoString(segmentInfo));
    this.resetEverything();
    return;
  }

  var isWalkingForward = this.mediaIndex !== null; // Don't do a rendition switch unless we have enough time to get a sync segment
  // and conservatively guess

  if (isWalkingForward) {
    this.trigger('bandwidthupdate');
  }

  this.trigger('progress');
  this.mediaIndex = segmentInfo.mediaIndex;
  this.partIndex = segmentInfo.partIndex; // any time an update finishes and the last segment is in the
  // buffer, end the stream. this ensures the "ended" event will
  // fire if playback reaches that point.

  if (this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex)) {
    this.endOfStream();
  } // used for testing


  this.trigger('appended');

  if (segmentInfo.hasAppendedData_) {
    this.mediaAppends++;
  }

  if (!this.paused()) {
    this.monitorBuffer_();
  }
}
/**
 * Records the current throughput of the decrypt, transmux, and append
 * portion of the semgment pipeline. `throughput.rate` is a the cumulative
 * moving average of the throughput. `throughput.count` is the number of
 * data points in the average.
 *
 * @private
 * @param {Object} segmentInfo the object returned by loadSegment
 */
;
24253
_proto.recordThroughput_ = function recordThroughput_(segmentInfo) {
  // Throughput samples from very short segments are too noisy to keep.
  if (segmentInfo.duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
    this.logger_("Ignoring segment's throughput because its duration of " + segmentInfo.duration + (" is less than the min to record " + MIN_SEGMENT_DURATION_TO_SAVE_STATS));
    return;
  }

  var previousRate = this.throughput.rate;
  // Add one to the time to ensure that we don't accidentally attempt to
  // divide by zero in the case where the throughput is ridiculously high.
  var segmentProcessingTime = Date.now() - segmentInfo.endOfAllRequests + 1;
  // Multiply by 8000 to convert from bytes/millisecond to bits/second.
  var segmentProcessingThroughput = Math.floor(segmentInfo.byteLength / segmentProcessingTime * 8 * 1000);

  // Cumulative moving average:
  //   newAvg = oldAvg + (sample - oldAvg) / (sampleCount + 1)
  this.throughput.count += 1;
  this.throughput.rate += (segmentProcessingThroughput - previousRate) / this.throughput.count;
}
/**
 * Adds a cue to the segment-metadata track with some metadata information about the
 * segment
 *
 * @private
 * @param {Object} segmentInfo
 *        the object returned by loadSegment
 * @method addSegmentMetadataCue_
 */
;
24280
_proto.addSegmentMetadataCue_ = function addSegmentMetadataCue_(segmentInfo) {
  // Nothing to do without a metadata track to hold the cue.
  if (!this.segmentMetadataTrack_) {
    return;
  }

  var segment = segmentInfo.segment;
  var start = segment.start;
  var end = segment.end;

  // A cue cannot be positioned without finite start and end times.
  if (!finite(start) || !finite(end)) {
    return;
  }

  // Clear any cues already covering this window before adding a fresh one.
  removeCuesFromTrack(start, end, this.segmentMetadataTrack_);
  var CueConstructor = window.WebKitDataCue || window.VTTCue;
  var value = {
    custom: segment.custom,
    dateTimeObject: segment.dateTimeObject,
    dateTimeString: segment.dateTimeString,
    bandwidth: segmentInfo.playlist.attributes.BANDWIDTH,
    resolution: segmentInfo.playlist.attributes.RESOLUTION,
    codecs: segmentInfo.playlist.attributes.CODECS,
    byteLength: segmentInfo.byteLength,
    uri: segmentInfo.uri,
    timeline: segmentInfo.timeline,
    playlist: segmentInfo.playlist.id,
    start: start,
    end: end
  };
  var cue = new CueConstructor(start, end, JSON.stringify(value));
  // Safari's WebKitDataCue and the standard VTTCue expose their payload
  // differently, so mirror the metadata onto `value` for a consistent shape.
  cue.value = value;
  this.segmentMetadataTrack_.addCue(cue);
};
24317
24318 return SegmentLoader;
24319 }(videojs__default["default"].EventTarget);
24320
// A shared no-op, used as the default callback throughout this module.
function noop() {}
24322
/**
 * Uppercase the first matched character of a string (e.g. "video" -> "Video").
 * Non-string input is returned untouched.
 *
 * @param {string} string the string to convert
 * @return {string} the converted string
 */
var toTitleCase = function toTitleCase(string) {
  if (typeof string !== 'string') {
    return string;
  }

  var upperFirst = function upperFirst(match) {
    return match.toUpperCase();
  };

  return string.replace(/./, upperFirst);
};
24332
// The two source buffer types a SourceUpdater manages.
var bufferTypes = ['video', 'audio'];
24334
// Whether the given source buffer type is busy: either the underlying
// SourceBuffer reports `updating`, or a queued action for it is still pending.
var _updating = function updating(type, sourceUpdater) {
  var buffer = sourceUpdater[type + "Buffer"];

  if (buffer && buffer.updating) {
    return buffer.updating;
  }

  return sourceUpdater.queuePending[type];
};
24339
/**
 * Find the index of the next queue entry matching `type`.
 *
 * Returns null either when no entry of that type exists or when a
 * 'mediaSource' entry appears first — media source actions use multiple
 * source buffers, so they must be allowed to run before later per-type work.
 *
 * @param {string} type the source buffer type to look for
 * @param {Array} queue the queue of pending entries
 * @return {number|null} the matching index, or null
 */
var nextQueueIndexOfType = function nextQueueIndexOfType(type, queue) {
  for (var index = 0; index < queue.length; index++) {
    var entryType = queue[index].type;

    if (entryType === 'mediaSource') {
      return null;
    }

    if (entryType === type) {
      return index;
    }
  }

  return null;
};
24357
/**
 * Attempt to run the next queued action for the given type ('audio', 'video',
 * or 'mediaSource'). Media source actions block the whole queue until both
 * source buffers are free; per-type actions only wait on their own buffer.
 *
 * @param {string} type the queue type to process
 * @param {SourceUpdater} sourceUpdater the owner of the queue
 */
var shiftQueue = function shiftQueue(type, sourceUpdater) {
  if (sourceUpdater.queue.length === 0) {
    return;
  }

  var queueIndex = 0;
  var queueEntry = sourceUpdater.queue[queueIndex];

  if (queueEntry.type === 'mediaSource') {
    // media source actions may only run when no source buffer is busy and
    // the media source is still usable
    if (!sourceUpdater.updating() && sourceUpdater.mediaSource.readyState !== 'closed') {
      sourceUpdater.queue.shift();
      queueEntry.action(sourceUpdater);

      if (queueEntry.doneFn) {
        queueEntry.doneFn();
      } // Only specific source buffer actions must wait for async updateend events. Media
      // Source actions process synchronously. Therefore, both audio and video source
      // buffers are now clear to process the next queue entries.


      shiftQueue('audio', sourceUpdater);
      shiftQueue('video', sourceUpdater);
    } // Media Source actions require both source buffers, so if the media source action
    // couldn't process yet (because one or both source buffers are busy), block other
    // queue actions until both are available and the media source action can process.


    return;
  }

  if (type === 'mediaSource') {
    // If the queue was shifted by a media source action (this happens when pushing a
    // media source action onto the queue), then it wasn't from an updateend event from an
    // audio or video source buffer, so there's no change from previous state, and no
    // processing should be done.
    return;
  } // Media source queue entries don't need to consider whether the source updater is
  // started (i.e., source buffers are created) as they don't need the source buffers, but
  // source buffer queue entries do.


  if (!sourceUpdater.ready() || sourceUpdater.mediaSource.readyState === 'closed' || _updating(type, sourceUpdater)) {
    return;
  }

  if (queueEntry.type !== type) {
    queueIndex = nextQueueIndexOfType(type, sourceUpdater.queue);

    if (queueIndex === null) {
      // Either there's no queue entry that uses this source buffer type in the queue, or
      // there's a media source queue entry before the next entry of this type, in which
      // case wait for that action to process first.
      return;
    }

    queueEntry = sourceUpdater.queue[queueIndex];
  }

  sourceUpdater.queue.splice(queueIndex, 1); // Keep a record that this source buffer type is in use.
  //
  // The queue pending operation must be set before the action is performed in the event
  // that the action results in a synchronous event that is acted upon. For instance, if
  // an exception is thrown that can be handled, it's possible that new actions will be
  // appended to an empty queue and immediately executed, but would not have the correct
  // pending information if this property was set after the action was performed.

  sourceUpdater.queuePending[type] = queueEntry;
  queueEntry.action(type, sourceUpdater);

  if (!queueEntry.doneFn) {
    // synchronous operation, process next entry
    sourceUpdater.queuePending[type] = null;
    shiftQueue(type, sourceUpdater);
    return;
  }
};
24434
/**
 * Detach listeners from a source buffer and drop the SourceUpdater's
 * references to it (both the buffer itself and its recorded codec).
 *
 * @param {string} type 'audio' or 'video'
 * @param {SourceUpdater} sourceUpdater the owner of the buffer
 */
var cleanupBuffer = function cleanupBuffer(type, sourceUpdater) {
  var buffer = sourceUpdater[type + "Buffer"];
  var titleType = toTitleCase(type);

  // nothing to clean up when the buffer was never created (or already cleared)
  if (!buffer) {
    return;
  }

  buffer.removeEventListener('updateend', sourceUpdater["on" + titleType + "UpdateEnd_"]);
  buffer.removeEventListener('error', sourceUpdater["on" + titleType + "Error_"]);
  sourceUpdater.codecs[type] = null;
  sourceUpdater[type + "Buffer"] = null;
};
24448
// True only when both arguments exist and the source buffer is currently one
// of the media source's sourceBuffers. Falsy inputs fall through unchanged,
// mirroring short-circuit `&&` evaluation.
var inSourceBuffers = function inSourceBuffers(mediaSource, sourceBuffer) {
  if (!mediaSource || !sourceBuffer) {
    return mediaSource && sourceBuffer;
  }

  return Array.prototype.indexOf.call(mediaSource.sourceBuffers, sourceBuffer) !== -1;
};
24452
// Factories for the operations that can be placed on a SourceUpdater's queue.
// Each factory returns the queued action: source-buffer actions are invoked
// with (type, sourceUpdater); media-source-wide actions (endOfStream,
// duration, addSourceBuffer, removeSourceBuffer) with (sourceUpdater) only.
var actions = {
  appendBuffer: function appendBuffer(bytes, segmentInfo, onError) {
    return function (type, sourceUpdater) {
      var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
      // or the media source does not contain this source buffer.

      if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
        return;
      }

      sourceUpdater.logger_("Appending segment " + segmentInfo.mediaIndex + "'s " + bytes.length + " bytes to " + type + "Buffer");

      try {
        sourceBuffer.appendBuffer(bytes);
      } catch (e) {
        // a failed append (e.g. QUOTA_EXCEEDED_ERR) will not fire updateend,
        // so clear the pending slot here and report through onError
        sourceUpdater.logger_("Error with code " + e.code + " " + (e.code === QUOTA_EXCEEDED_ERR ? '(QUOTA_EXCEEDED_ERR) ' : '') + ("when appending segment " + segmentInfo.mediaIndex + " to " + type + "Buffer"));
        sourceUpdater.queuePending[type] = null;
        onError(e);
      }
    };
  },
  remove: function remove(start, end) {
    return function (type, sourceUpdater) {
      var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
      // or the media source does not contain this source buffer.

      if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
        return;
      }

      sourceUpdater.logger_("Removing " + start + " to " + end + " from " + type + "Buffer");

      try {
        sourceBuffer.remove(start, end);
      } catch (e) {
        // removal is best-effort; a failure is logged but not surfaced
        sourceUpdater.logger_("Remove " + start + " to " + end + " from " + type + "Buffer failed");
      }
    };
  },
  timestampOffset: function timestampOffset(offset) {
    return function (type, sourceUpdater) {
      var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
      // or the media source does not contain this source buffer.

      if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
        return;
      }

      // NOTE(review): no space between `type` and "timestampOffset" in this
      // log message (reads e.g. "videotimestampOffset"); preserved as-is.
      sourceUpdater.logger_("Setting " + type + "timestampOffset to " + offset);
      sourceBuffer.timestampOffset = offset;
    };
  },
  callback: function callback(_callback) {
    return function (type, sourceUpdater) {
      // run an arbitrary callback at its position in the queue
      _callback();
    };
  },
  endOfStream: function endOfStream(error) {
    return function (sourceUpdater) {
      // endOfStream is only valid while the media source is open
      if (sourceUpdater.mediaSource.readyState !== 'open') {
        return;
      }

      sourceUpdater.logger_("Calling mediaSource endOfStream(" + (error || '') + ")");

      try {
        sourceUpdater.mediaSource.endOfStream(error);
      } catch (e) {
        videojs__default["default"].log.warn('Failed to call media source endOfStream', e);
      }
    };
  },
  duration: function duration(_duration) {
    return function (sourceUpdater) {
      sourceUpdater.logger_("Setting mediaSource duration to " + _duration);

      try {
        sourceUpdater.mediaSource.duration = _duration;
      } catch (e) {
        videojs__default["default"].log.warn('Failed to set media source duration', e);
      }
    };
  },
  abort: function abort() {
    return function (type, sourceUpdater) {
      // abort is only valid while the media source is open
      if (sourceUpdater.mediaSource.readyState !== 'open') {
        return;
      }

      var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
      // or the media source does not contain this source buffer.

      if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
        return;
      }

      sourceUpdater.logger_("calling abort on " + type + "Buffer");

      try {
        sourceBuffer.abort();
      } catch (e) {
        videojs__default["default"].log.warn("Failed to abort on " + type + "Buffer", e);
      }
    };
  },
  addSourceBuffer: function addSourceBuffer(type, codec) {
    return function (sourceUpdater) {
      var titleType = toTitleCase(type);
      var mime = getMimeForCodec(codec);
      sourceUpdater.logger_("Adding " + type + "Buffer with codec " + codec + " to mediaSource");
      var sourceBuffer = sourceUpdater.mediaSource.addSourceBuffer(mime);
      // wire the new buffer into this SourceUpdater's queue machinery
      sourceBuffer.addEventListener('updateend', sourceUpdater["on" + titleType + "UpdateEnd_"]);
      sourceBuffer.addEventListener('error', sourceUpdater["on" + titleType + "Error_"]);
      sourceUpdater.codecs[type] = codec;
      sourceUpdater[type + "Buffer"] = sourceBuffer;
    };
  },
  removeSourceBuffer: function removeSourceBuffer(type) {
    return function (sourceUpdater) {
      var sourceBuffer = sourceUpdater[type + "Buffer"];
      cleanupBuffer(type, sourceUpdater); // can't do anything if the media source / source buffer is null
      // or the media source does not contain this source buffer.

      if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
        return;
      }

      // NOTE(review): cleanupBuffer above has already reset codecs[type] to
      // null, so this log line reports "null" rather than the removed codec.
      sourceUpdater.logger_("Removing " + type + "Buffer with codec " + sourceUpdater.codecs[type] + " from mediaSource");

      try {
        sourceUpdater.mediaSource.removeSourceBuffer(sourceBuffer);
      } catch (e) {
        videojs__default["default"].log.warn("Failed to removeSourceBuffer " + type + "Buffer", e);
      }
    };
  },
  changeType: function changeType(codec) {
    return function (type, sourceUpdater) {
      var sourceBuffer = sourceUpdater[type + "Buffer"];
      var mime = getMimeForCodec(codec); // can't do anything if the media source / source buffer is null
      // or the media source does not contain this source buffer.

      if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
        return;
      } // do not update codec if we don't need to.


      if (sourceUpdater.codecs[type] === codec) {
        return;
      }

      sourceUpdater.logger_("changing " + type + "Buffer codec from " + sourceUpdater.codecs[type] + " to " + codec);
      sourceBuffer.changeType(mime);
      sourceUpdater.codecs[type] = codec;
    };
  }
};
24610
/**
 * Append an action entry onto the SourceUpdater's queue and immediately try
 * to process the queue for that entry's type.
 *
 * @param {Object} _ref options: { type, sourceUpdater, action, doneFn, name }
 */
var pushQueue = function pushQueue(_ref) {
  var entry = {
    type: _ref.type,
    action: _ref.action,
    doneFn: _ref.doneFn,
    name: _ref.name
  };

  _ref.sourceUpdater.queue.push(entry);

  shiftQueue(entry.type, _ref.sourceUpdater);
};
24625
/**
 * Build the updateend handler for a source buffer type. Completes the
 * pending queue entry (if any) and then processes the next entry.
 *
 * @param {string} type 'audio' or 'video'
 * @param {SourceUpdater} sourceUpdater the owner of the queue
 * @return {Function} the updateend event listener
 */
var onUpdateend = function onUpdateend(type, sourceUpdater) {
  return function (e) {
    // In theory every updateend corresponds to a pending action, but some
    // operations can fire updateend events without set definitions in the
    // w3c spec — setting the media source duration, for instance, may fire
    // updateend on the source buffers. When there is no pending entry for
    // this type, simply move on to the next action in the queue.
    var pending = sourceUpdater.queuePending[type];

    if (pending) {
      sourceUpdater.queuePending[type] = null;

      if (pending.doneFn) {
        // surface any recorded source buffer error to the callback
        pending.doneFn(sourceUpdater[type + "Error_"]);
      }
    }

    shiftQueue(type, sourceUpdater);
  };
};
24647 /**
24648 * A queue of callbacks to be serialized and applied when a
24649 * MediaSource and its associated SourceBuffers are not in the
24650 * updating state. It is used by the segment loader to update the
24651 * underlying SourceBuffers when new data is loaded, for instance.
24652 *
24653 * @class SourceUpdater
24654 * @param {MediaSource} mediaSource the MediaSource to create the SourceBuffer from
24655 * @param {string} mimeType the desired MIME type of the underlying SourceBuffer
24656 */
24657
24658
var SourceUpdater = /*#__PURE__*/function (_videojs$EventTarget) {
  inheritsLoose(SourceUpdater, _videojs$EventTarget);

  /**
   * @param {MediaSource} mediaSource the media source whose buffers this
   *        updater will manage
   */
  function SourceUpdater(mediaSource) {
    var _this;

    _this = _videojs$EventTarget.call(this) || this;
    _this.mediaSource = mediaSource;

    // process any queued media source work once the source opens
    _this.sourceopenListener_ = function () {
      return shiftQueue('mediaSource', assertThisInitialized(_this));
    };

    _this.mediaSource.addEventListener('sourceopen', _this.sourceopenListener_);

    _this.logger_ = logger('SourceUpdater'); // initial timestamp offset is 0

    _this.audioTimestampOffset_ = 0;
    _this.videoTimestampOffset_ = 0;
    // serialized queue of pending buffer / media source operations
    _this.queue = [];
    // per-type record of the action currently awaiting its updateend event
    _this.queuePending = {
      audio: null,
      video: null
    };
    // audio appends wait here until the first video append has been queued
    // (see appendBuffer below)
    _this.delayedAudioAppendQueue_ = [];
    _this.videoAppendQueued_ = false;
    _this.codecs = {};
    _this.onVideoUpdateEnd_ = onUpdateend('video', assertThisInitialized(_this));
    _this.onAudioUpdateEnd_ = onUpdateend('audio', assertThisInitialized(_this));

    _this.onVideoError_ = function (e) {
      // used for debugging
      _this.videoError_ = e;
    };

    _this.onAudioError_ = function (e) {
      // used for debugging
      _this.audioError_ = e;
    };

    _this.createdSourceBuffers_ = false;
    _this.initializedEme_ = false;
    _this.triggeredReady_ = false;
    return _this;
  }

  var _proto = SourceUpdater.prototype;

  // Mark EME (encrypted media) initialization complete; may make us ready.
  _proto.initializedEme = function initializedEme() {
    this.initializedEme_ = true;
    this.triggerReady();
  };

  _proto.hasCreatedSourceBuffers = function hasCreatedSourceBuffers() {
    // if false, likely waiting on one of the segment loaders to get enough data to create
    // source buffers
    return this.createdSourceBuffers_;
  };

  _proto.hasInitializedAnyEme = function hasInitializedAnyEme() {
    return this.initializedEme_;
  };

  // Ready requires both: source buffers created AND eme initialized.
  _proto.ready = function ready() {
    return this.hasCreatedSourceBuffers() && this.hasInitializedAnyEme();
  };

  _proto.createSourceBuffers = function createSourceBuffers(codecs) {
    if (this.hasCreatedSourceBuffers()) {
      // already created them before
      return;
    } // the intial addOrChangeSourceBuffers will always be
    // two add buffers.


    this.addOrChangeSourceBuffers(codecs);
    this.createdSourceBuffers_ = true;
    this.trigger('createdsourcebuffers');
    this.triggerReady();
  };

  _proto.triggerReady = function triggerReady() {
    // only allow ready to be triggered once, this prevents the case
    // where:
    // 1. we trigger createdsourcebuffers
    // 2. ie 11 synchronously initializates eme
    // 3. the synchronous initialization causes us to trigger ready
    // 4. We go back to the ready check in createSourceBuffers and ready is triggered again.
    if (this.ready() && !this.triggeredReady_) {
      this.triggeredReady_ = true;
      this.trigger('ready');
    }
  }
24752 /**
24753 * Add a type of source buffer to the media source.
24754 *
24755 * @param {string} type
24756 * The type of source buffer to add.
24757 *
24758 * @param {string} codec
24759 * The codec to add the source buffer with.
24760 */
24761 ;
24762
  _proto.addSourceBuffer = function addSourceBuffer(type, codec) {
    // queued as a media source action, since adding a buffer operates on the
    // media source itself rather than an existing source buffer
    pushQueue({
      type: 'mediaSource',
      sourceUpdater: this,
      action: actions.addSourceBuffer(type, codec),
      name: 'addSourceBuffer'
    });
  }
  /**
   * call abort on a source buffer.
   *
   * @param {string} type
   *        The type of source buffer to call abort on.
   */
  ;

  _proto.abort = function abort(type) {
    pushQueue({
      type: type,
      sourceUpdater: this,
      action: actions.abort(type),
      name: 'abort'
    });
  }
  /**
   * Call removeSourceBuffer and remove a specific type
   * of source buffer on the mediaSource.
   *
   * @param {string} type
   *        The type of source buffer to remove.
   */
  ;

  _proto.removeSourceBuffer = function removeSourceBuffer(type) {
    // bail early on platforms where removal is known to be unreliable
    if (!this.canRemoveSourceBuffer()) {
      videojs__default["default"].log.error('removeSourceBuffer is not supported!');
      return;
    }

    pushQueue({
      type: 'mediaSource',
      sourceUpdater: this,
      action: actions.removeSourceBuffer(type),
      name: 'removeSourceBuffer'
    });
  }
  /**
   * Whether or not the removeSourceBuffer function is supported
   * on the mediaSource.
   *
   * @return {boolean}
   *          if removeSourceBuffer can be called.
   */
  ;

  _proto.canRemoveSourceBuffer = function canRemoveSourceBuffer() {
    // IE reports that it supports removeSourceBuffer, but often throws
    // errors when attempting to use the function. So we report that it
    // does not support removeSourceBuffer. As of Firefox 83 removeSourceBuffer
    // throws errors, so we report that it does not support this as well.
    return !videojs__default["default"].browser.IE_VERSION && !videojs__default["default"].browser.IS_FIREFOX && window.MediaSource && window.MediaSource.prototype && typeof window.MediaSource.prototype.removeSourceBuffer === 'function';
  }
  /**
   * Whether or not the changeType function is supported
   * on our SourceBuffers.
   *
   * @return {boolean}
   *         if changeType can be called.
   */
  ;

  SourceUpdater.canChangeType = function canChangeType() {
    return window.SourceBuffer && window.SourceBuffer.prototype && typeof window.SourceBuffer.prototype.changeType === 'function';
  }
  /**
   * Whether or not the changeType function is supported
   * on our SourceBuffers.
   *
   * @return {boolean}
   *         if changeType can be called.
   */
  ;

  _proto.canChangeType = function canChangeType() {
    // instance-level convenience wrapper around the static check above
    return this.constructor.canChangeType();
  }
24849 /**
24850 * Call the changeType function on a source buffer, given the code and type.
24851 *
24852 * @param {string} type
24853 * The type of source buffer to call changeType on.
24854 *
24855 * @param {string} codec
24856 * The codec string to change type with on the source buffer.
24857 */
24858 ;
24859
  _proto.changeType = function changeType(type, codec) {
    if (!this.canChangeType()) {
      videojs__default["default"].log.error('changeType is not supported!');
      return;
    }

    pushQueue({
      type: type,
      sourceUpdater: this,
      action: actions.changeType(codec),
      name: 'changeType'
    });
  }
  /**
   * Add source buffers with a codec or, if they are already created,
   * call changeType on source buffers using changeType.
   *
   * @param {Object} codecs
   *        Codecs to switch to
   */
  ;

  _proto.addOrChangeSourceBuffers = function addOrChangeSourceBuffers(codecs) {
    var _this2 = this;

    if (!codecs || typeof codecs !== 'object' || Object.keys(codecs).length === 0) {
      throw new Error('Cannot addOrChangeSourceBuffers to undefined codecs');
    }

    Object.keys(codecs).forEach(function (type) {
      var codec = codecs[type];

      // before the initial buffer creation, every codec results in an add
      if (!_this2.hasCreatedSourceBuffers()) {
        return _this2.addSourceBuffer(type, codec);
      }

      // afterwards, switch codecs in place where the platform allows it
      if (_this2.canChangeType()) {
        _this2.changeType(type, codec);
      }
    });
  }
24901 /**
24902 * Queue an update to append an ArrayBuffer.
24903 *
24904 * @param {MediaObject} object containing audioBytes and/or videoBytes
24905 * @param {Function} done the function to call when done
24906 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-appendBuffer-void-ArrayBuffer-data
24907 */
24908 ;
24909
  _proto.appendBuffer = function appendBuffer(options, doneFn) {
    var _this3 = this;

    var segmentInfo = options.segmentInfo,
        type = options.type,
        bytes = options.bytes;
    this.processedAppend_ = true;

    // When a video buffer exists, audio appends are held back until the first
    // video append has been queued; they are flushed in the `type === 'video'`
    // branch below.
    if (type === 'audio' && this.videoBuffer && !this.videoAppendQueued_) {
      this.delayedAudioAppendQueue_.push([options, doneFn]);
      this.logger_("delayed audio append of " + bytes.length + " until video append");
      return;
    } // In the case of certain errors, for instance, QUOTA_EXCEEDED_ERR, updateend will
    // not be fired. This means that the queue will be blocked until the next action
    // taken by the segment-loader. Provide a mechanism for segment-loader to handle
    // these errors by calling the doneFn with the specific error.


    var onError = doneFn;
    pushQueue({
      type: type,
      sourceUpdater: this,
      action: actions.appendBuffer(bytes, segmentInfo || {
        mediaIndex: -1
      }, onError),
      doneFn: doneFn,
      name: 'appendBuffer'
    });

    if (type === 'video') {
      this.videoAppendQueued_ = true;

      if (!this.delayedAudioAppendQueue_.length) {
        return;
      }

      // flush any audio appends that were waiting on this video append
      var queue = this.delayedAudioAppendQueue_.slice();
      this.logger_("queuing delayed audio " + queue.length + " appendBuffers");
      this.delayedAudioAppendQueue_.length = 0;
      queue.forEach(function (que) {
        _this3.appendBuffer.apply(_this3, que);
      });
    }
  }
24954 /**
24955 * Get the audio buffer's buffered timerange.
24956 *
24957 * @return {TimeRange}
24958 * The audio buffer's buffered time range
24959 */
24960 ;
24961
  _proto.audioBuffered = function audioBuffered() {
    // no media source/source buffer or it isn't in the media sources
    // source buffer list
    if (!inSourceBuffers(this.mediaSource, this.audioBuffer)) {
      return videojs__default["default"].createTimeRange();
    }

    return this.audioBuffer.buffered ? this.audioBuffer.buffered : videojs__default["default"].createTimeRange();
  }
  /**
   * Get the video buffer's buffered timerange.
   *
   * @return {TimeRange}
   *         The video buffer's buffered time range
   */
  ;

  _proto.videoBuffered = function videoBuffered() {
    // no media source/source buffer or it isn't in the media sources
    // source buffer list
    if (!inSourceBuffers(this.mediaSource, this.videoBuffer)) {
      return videojs__default["default"].createTimeRange();
    }

    return this.videoBuffer.buffered ? this.videoBuffer.buffered : videojs__default["default"].createTimeRange();
  }
  /**
   * Get a combined video/audio buffer's buffered timerange.
   *
   * @return {TimeRange}
   *         the combined time range
   */
  ;

  _proto.buffered = function buffered() {
    var video = inSourceBuffers(this.mediaSource, this.videoBuffer) ? this.videoBuffer : null;
    var audio = inSourceBuffers(this.mediaSource, this.audioBuffer) ? this.audioBuffer : null;

    // with only one active buffer, that buffer's range is the answer
    if (audio && !video) {
      return this.audioBuffered();
    }

    if (video && !audio) {
      return this.videoBuffered();
    }

    // otherwise report only time ranges buffered in BOTH buffers
    return bufferIntersection(this.audioBuffered(), this.videoBuffered());
  }
25010 /**
25011 * Add a callback to the queue that will set duration on the mediaSource.
25012 *
25013 * @param {number} duration
25014 * The duration to set
25015 *
25016 * @param {Function} [doneFn]
25017 * function to run after duration has been set.
25018 */
25019 ;
25020
  _proto.setDuration = function setDuration(duration, doneFn) {
    if (doneFn === void 0) {
      doneFn = noop;
    }

    // In order to set the duration on the media source, it's necessary to wait for all
    // source buffers to no longer be updating. "If the updating attribute equals true on
    // any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
    // abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
    pushQueue({
      type: 'mediaSource',
      sourceUpdater: this,
      action: actions.duration(duration),
      name: 'duration',
      doneFn: doneFn
    });
  }
  /**
   * Add a mediaSource endOfStream call to the queue
   *
   * @param {Error} [error]
   *        Call endOfStream with an error
   *
   * @param {Function} [doneFn]
   *        A function that should be called when the
   *        endOfStream call has finished.
   */
  ;

  _proto.endOfStream = function endOfStream(error, doneFn) {
    if (error === void 0) {
      error = null;
    }

    if (doneFn === void 0) {
      doneFn = noop;
    }

    // non-string errors are normalized to undefined, i.e. a plain
    // (successful) end of stream
    if (typeof error !== 'string') {
      error = undefined;
    } // In order to set the duration on the media source, it's necessary to wait for all
    // source buffers to no longer be updating. "If the updating attribute equals true on
    // any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
    // abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).


    pushQueue({
      type: 'mediaSource',
      sourceUpdater: this,
      action: actions.endOfStream(error),
      name: 'endOfStream',
      doneFn: doneFn
    });
  }
25075 /**
25076 * Queue an update to remove a time range from the buffer.
25077 *
25078 * @param {number} start where to start the removal
25079 * @param {number} end where to end the removal
25080 * @param {Function} [done=noop] optional callback to be executed when the remove
25081 * operation is complete
25082 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
25083 */
25084 ;
25085
25086 _proto.removeAudio = function removeAudio(start, end, done) {
25087 if (done === void 0) {
25088 done = noop;
25089 }
25090
25091 if (!this.audioBuffered().length || this.audioBuffered().end(0) === 0) {
25092 done();
25093 return;
25094 }
25095
25096 pushQueue({
25097 type: 'audio',
25098 sourceUpdater: this,
25099 action: actions.remove(start, end),
25100 doneFn: done,
25101 name: 'remove'
25102 });
25103 }
25104 /**
25105 * Queue an update to remove a time range from the buffer.
25106 *
25107 * @param {number} start where to start the removal
25108 * @param {number} end where to end the removal
25109 * @param {Function} [done=noop] optional callback to be executed when the remove
25110 * operation is complete
25111 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
25112 */
25113 ;
25114
25115 _proto.removeVideo = function removeVideo(start, end, done) {
25116 if (done === void 0) {
25117 done = noop;
25118 }
25119
25120 if (!this.videoBuffered().length || this.videoBuffered().end(0) === 0) {
25121 done();
25122 return;
25123 }
25124
25125 pushQueue({
25126 type: 'video',
25127 sourceUpdater: this,
25128 action: actions.remove(start, end),
25129 doneFn: done,
25130 name: 'remove'
25131 });
25132 }
25133 /**
25134 * Whether the underlying sourceBuffer is updating or not
25135 *
25136 * @return {boolean} the updating status of the SourceBuffer
25137 */
25138 ;
25139
25140 _proto.updating = function updating() {
25141 // the audio/video source buffer is updating
25142 if (_updating('audio', this) || _updating('video', this)) {
25143 return true;
25144 }
25145
25146 return false;
25147 }
25148 /**
25149 * Set/get the timestampoffset on the audio SourceBuffer
25150 *
25151 * @return {number} the timestamp offset
25152 */
25153 ;
25154
  _proto.audioTimestampOffset = function audioTimestampOffset(offset) {
    // acts as a setter only when a new, different offset is provided and an
    // audio buffer exists; always returns the current (possibly new) offset
    if (typeof offset !== 'undefined' && this.audioBuffer && // no point in updating if it's the same
    this.audioTimestampOffset_ !== offset) {
      pushQueue({
        type: 'audio',
        sourceUpdater: this,
        action: actions.timestampOffset(offset),
        name: 'timestampOffset'
      });
      this.audioTimestampOffset_ = offset;
    }

    return this.audioTimestampOffset_;
  }
25169 /**
25170 * Set/get the timestampoffset on the video SourceBuffer
25171 *
25172 * @return {number} the timestamp offset
25173 */
25174 ;
25175
25176 _proto.videoTimestampOffset = function videoTimestampOffset(offset) {
25177 if (typeof offset !== 'undefined' && this.videoBuffer && // no point in updating if it's the same
25178 this.videoTimestampOffset !== offset) {
25179 pushQueue({
25180 type: 'video',
25181 sourceUpdater: this,
25182 action: actions.timestampOffset(offset),
25183 name: 'timestampOffset'
25184 });
25185 this.videoTimestampOffset_ = offset;
25186 }
25187
25188 return this.videoTimestampOffset_;
25189 }
25190 /**
25191 * Add a function to the queue that will be called
25192 * when it is its turn to run in the audio queue.
25193 *
25194 * @param {Function} callback
25195 * The callback to queue.
25196 */
25197 ;
25198
  _proto.audioQueueCallback = function audioQueueCallback(callback) {
    // without an audio buffer there is no audio queue to schedule against
    if (!this.audioBuffer) {
      return;
    }

    pushQueue({
      type: 'audio',
      sourceUpdater: this,
      action: actions.callback(callback),
      name: 'callback'
    });
  }
25211 /**
25212 * Add a function to the queue that will be called
25213 * when it is its turn to run in the video queue.
25214 *
25215 * @param {Function} callback
25216 * The callback to queue.
25217 */
25218 ;
25219
25220 _proto.videoQueueCallback = function videoQueueCallback(callback) {
25221 if (!this.videoBuffer) {
25222 return;
25223 }
25224
25225 pushQueue({
25226 type: 'video',
25227 sourceUpdater: this,
25228 action: actions.callback(callback),
25229 name: 'callback'
25230 });
25231 }
25232 /**
25233 * dispose of the source updater and the underlying sourceBuffer
25234 */
25235 ;
25236
25237 _proto.dispose = function dispose() {
25238 var _this4 = this;
25239
25240 this.trigger('dispose');
25241 bufferTypes.forEach(function (type) {
25242 _this4.abort(type);
25243
25244 if (_this4.canRemoveSourceBuffer()) {
25245 _this4.removeSourceBuffer(type);
25246 } else {
25247 _this4[type + "QueueCallback"](function () {
25248 return cleanupBuffer(type, _this4);
25249 });
25250 }
25251 });
25252 this.videoAppendQueued_ = false;
25253 this.delayedAudioAppendQueue_.length = 0;
25254
25255 if (this.sourceopenListener_) {
25256 this.mediaSource.removeEventListener('sourceopen', this.sourceopenListener_);
25257 }
25258
25259 this.off();
25260 };
25261
25262 return SourceUpdater;
25263 }(videojs__default["default"].EventTarget);
25264
25265 var uint8ToUtf8 = function uint8ToUtf8(uintArray) {
25266 return decodeURIComponent(escape(String.fromCharCode.apply(null, uintArray)));
25267 };
25268
25269 var VTT_LINE_TERMINATORS = new Uint8Array('\n\n'.split('').map(function (char) {
25270 return char.charCodeAt(0);
25271 }));
25272 /**
25273 * An object that manages segment loading and appending.
25274 *
25275 * @class VTTSegmentLoader
25276 * @param {Object} options required and optional options
25277 * @extends videojs.EventTarget
25278 */
25279
  var VTTSegmentLoader = /*#__PURE__*/function (_SegmentLoader) {
    inheritsLoose(VTTSegmentLoader, _SegmentLoader);

    function VTTSegmentLoader(settings, options) {
      var _this;

      if (options === void 0) {
        options = {};
      }

      _this = _SegmentLoader.call(this, settings, options) || this; // SegmentLoader requires a MediaSource be specified or it will throw an error;
      // however, VTTSegmentLoader has no need of a media source, so delete the reference

      _this.mediaSource_ = null;
      _this.subtitlesTrack_ = null;
      _this.loaderType_ = 'subtitle';
      _this.featuresNativeTextTracks_ = settings.featuresNativeTextTracks; // The VTT segment will have its own time mappings. Saving VTT segment timing info in
      // the sync controller leads to improper behavior.

      _this.shouldSaveSegmentTimingInfo_ = false;
      return _this;
    }

    var _proto = VTTSegmentLoader.prototype;

    // subtitle bytes are parsed with vtt.js directly (see parseVTTCues_), so
    // no transmuxer is ever created for this loader
    _proto.createTransmuxer_ = function createTransmuxer_() {
      // don't need to transmux any subtitles
      return null;
    }
    /**
     * Indicates which time ranges are buffered
     *
     * @return {TimeRange}
     *         TimeRange object representing the current buffered ranges
     */
    ;

    _proto.buffered_ = function buffered_() {
      if (!this.subtitlesTrack_ || !this.subtitlesTrack_.cues || !this.subtitlesTrack_.cues.length) {
        return videojs__default["default"].createTimeRanges();
      }

      var cues = this.subtitlesTrack_.cues;
      var start = cues[0].startTime;
      // NOTE(review): the range end is the *startTime* of the last cue, not
      // its endTime — looks deliberate (the last cue's own span isn't treated
      // as buffered), but confirm before changing.
      var end = cues[cues.length - 1].startTime;
      return videojs__default["default"].createTimeRanges([[start, end]]);
    }
    /**
     * Gets and sets init segment for the provided map
     *
     * @param {Object} map
     *        The map object representing the init segment to get or set
     * @param {boolean=} set
     *        If true, the init segment for the provided map should be saved
     * @return {Object}
     *         map object for desired init segment
     */
    ;

    _proto.initSegmentForMap = function initSegmentForMap(map, set) {
      if (set === void 0) {
        set = false;
      }

      if (!map) {
        return null;
      }

      var id = initSegmentId(map);
      var storedMap = this.initSegments_[id];

      if (set && !storedMap && map.bytes) {
        // append WebVTT line terminators to the media initialization segment if it exists
        // to follow the WebVTT spec (https://w3c.github.io/webvtt/#file-structure) that
        // requires two or more WebVTT line terminators between the WebVTT header and the
        // rest of the file
        var combinedByteLength = VTT_LINE_TERMINATORS.byteLength + map.bytes.byteLength;
        var combinedSegment = new Uint8Array(combinedByteLength);
        combinedSegment.set(map.bytes);
        combinedSegment.set(VTT_LINE_TERMINATORS, map.bytes.byteLength);
        this.initSegments_[id] = storedMap = {
          resolvedUri: map.resolvedUri,
          byterange: map.byterange,
          bytes: combinedSegment
        };
      }

      // fall back to the caller's map when nothing was stored
      return storedMap || map;
    }
    /**
     * Returns true if all configuration required for loading is present, otherwise false.
     *
     * @return {boolean} True if the all configuration is ready for loading
     * @private
     */
    ;

    _proto.couldBeginLoading_ = function couldBeginLoading_() {
      return this.playlist_ && this.subtitlesTrack_ && !this.paused();
    }
    /**
     * Once all the starting parameters have been specified, begin
     * operation. This method should only be invoked from the INIT
     * state.
     *
     * @private
     */
    ;

    _proto.init_ = function init_() {
      this.state = 'READY';
      this.resetEverything();
      return this.monitorBuffer_();
    }
    /**
     * Set a subtitle track on the segment loader to add subtitles to
     *
     * @param {TextTrack=} track
     *        The text track to add loaded subtitles to
     * @return {TextTrack}
     *         Returns the subtitles track
     */
    ;

    _proto.track = function track(_track) {
      // with no argument this acts as a getter
      if (typeof _track === 'undefined') {
        return this.subtitlesTrack_;
      }

      this.subtitlesTrack_ = _track; // if we were unpaused but waiting for a sourceUpdater, start
      // buffering now

      if (this.state === 'INIT' && this.couldBeginLoading_()) {
        this.init_();
      }

      return this.subtitlesTrack_;
    }
    /**
     * Remove any data in the source buffer between start and end times
     *
     * @param {number} start - the start time of the region to remove from the buffer
     * @param {number} end - the end time of the region to remove from the buffer
     */
    ;

    _proto.remove = function remove(start, end) {
      removeCuesFromTrack(start, end, this.subtitlesTrack_);
    }
    /**
     * fill the buffer with segments unless the sourceBuffers are
     * currently updating
     *
     * Note: this function should only ever be called by monitorBuffer_
     * and never directly
     *
     * @private
     */
    ;

    _proto.fillBuffer_ = function fillBuffer_() {
      var _this2 = this;

      // see if we need to begin loading immediately
      var segmentInfo = this.chooseNextRequest_();

      if (!segmentInfo) {
        return;
      }

      if (this.syncController_.timestampOffsetForTimeline(segmentInfo.timeline) === null) {
        // We don't have the timestamp offset that we need to sync subtitles.
        // Rerun on a timestamp offset or user interaction.
        var checkTimestampOffset = function checkTimestampOffset() {
          _this2.state = 'READY';

          if (!_this2.paused()) {
            // if not paused, queue a buffer check as soon as possible
            _this2.monitorBuffer_();
          }
        };

        this.syncController_.one('timestampoffset', checkTimestampOffset);
        this.state = 'WAITING_ON_TIMELINE';
        return;
      }

      this.loadSegment_(segmentInfo);
    } // never set a timestamp offset for vtt segments.
    ;

    _proto.timestampOffsetForSegment_ = function timestampOffsetForSegment_() {
      return null;
    };

    // wrap the parent's selection so that segments already known to contain no
    // cues are skipped
    _proto.chooseNextRequest_ = function chooseNextRequest_() {
      return this.skipEmptySegments_(_SegmentLoader.prototype.chooseNextRequest_.call(this));
    }
    /**
     * Prevents the segment loader from requesting segments we know contain no subtitles
     * by walking forward until we find the next segment that we don't know whether it is
     * empty or not.
     *
     * @param {Object} segmentInfo
     *        a segment info object that describes the current segment
     * @return {Object}
     *         a segment info object that describes the current segment
     */
    ;

    _proto.skipEmptySegments_ = function skipEmptySegments_(segmentInfo) {
      while (segmentInfo && segmentInfo.segment.empty) {
        // stop at the last possible segmentInfo
        if (segmentInfo.mediaIndex + 1 >= segmentInfo.playlist.segments.length) {
          segmentInfo = null;
          break;
        }

        segmentInfo = this.generateSegmentInfo_({
          playlist: segmentInfo.playlist,
          mediaIndex: segmentInfo.mediaIndex + 1,
          startOfSegment: segmentInfo.startOfSegment + segmentInfo.duration,
          isSyncRequest: segmentInfo.isSyncRequest
        });
      }

      return segmentInfo;
    };

    // record the error, return to READY, pause loading, and notify listeners
    // via an 'error' event
    _proto.stopForError = function stopForError(error) {
      this.error(error);
      this.state = 'READY';
      this.pause();
      this.trigger('error');
    }
    /**
     * append a decrypted segment to the SourceBuffer through a SourceUpdater
     *
     * @private
     */
    ;

    _proto.segmentRequestFinished_ = function segmentRequestFinished_(error, simpleSegment, result) {
      var _this3 = this;

      if (!this.subtitlesTrack_) {
        this.state = 'READY';
        return;
      }

      this.saveTransferStats_(simpleSegment.stats); // the request was aborted

      if (!this.pendingSegment_) {
        this.state = 'READY';
        this.mediaRequestsAborted += 1;
        return;
      }

      if (error) {
        if (error.code === REQUEST_ERRORS.TIMEOUT) {
          this.handleTimeout_();
        }

        if (error.code === REQUEST_ERRORS.ABORTED) {
          this.mediaRequestsAborted += 1;
        } else {
          this.mediaRequestsErrored += 1;
        }

        this.stopForError(error);
        return;
      }

      var segmentInfo = this.pendingSegment_; // although the VTT segment loader bandwidth isn't really used, it's good to
      // maintain functionality between segment loaders

      this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats);
      this.state = 'APPENDING'; // used for tests

      this.trigger('appending');
      var segment = segmentInfo.segment;

      if (segment.map) {
        segment.map.bytes = simpleSegment.map.bytes;
      }

      segmentInfo.bytes = simpleSegment.bytes; // Make sure that vttjs has loaded, otherwise, wait till it finished loading

      if (typeof window.WebVTT !== 'function' && this.subtitlesTrack_ && this.subtitlesTrack_.tech_) {
        var loadHandler;

        var errorHandler = function errorHandler() {
          _this3.subtitlesTrack_.tech_.off('vttjsloaded', loadHandler);

          _this3.stopForError({
            message: 'Error loading vtt.js'
          });

          return;
        };

        // once vtt.js is available, re-enter this method with the same
        // arguments to finish processing the segment
        loadHandler = function loadHandler() {
          _this3.subtitlesTrack_.tech_.off('vttjserror', errorHandler);

          _this3.segmentRequestFinished_(error, simpleSegment, result);
        };

        this.state = 'WAITING_ON_VTTJS';
        this.subtitlesTrack_.tech_.one('vttjsloaded', loadHandler);
        this.subtitlesTrack_.tech_.one('vttjserror', errorHandler);
        return;
      }

      segment.requested = true;

      try {
        this.parseVTTCues_(segmentInfo);
      } catch (e) {
        this.stopForError({
          message: e.message
        });
        return;
      }

      this.updateTimeMapping_(segmentInfo, this.syncController_.timelines[segmentInfo.timeline], this.playlist_);

      if (segmentInfo.cues.length) {
        segmentInfo.timingInfo = {
          start: segmentInfo.cues[0].startTime,
          end: segmentInfo.cues[segmentInfo.cues.length - 1].endTime
        };
      } else {
        segmentInfo.timingInfo = {
          start: segmentInfo.startOfSegment,
          end: segmentInfo.startOfSegment + segmentInfo.duration
        };
      }

      if (segmentInfo.isSyncRequest) {
        this.trigger('syncinfoupdate');
        this.pendingSegment_ = null;
        this.state = 'READY';
        return;
      }

      segmentInfo.byteLength = segmentInfo.bytes.byteLength;
      this.mediaSecondsLoaded += segment.duration; // Create VTTCue instances for each cue in the new segment and add them to
      // the subtitle track

      segmentInfo.cues.forEach(function (cue) {
        _this3.subtitlesTrack_.addCue(_this3.featuresNativeTextTracks_ ? new window.VTTCue(cue.startTime, cue.endTime, cue.text) : cue);
      }); // Remove any duplicate cues from the subtitle track. The WebVTT spec allows
      // cues to have identical time-intervals, but if the text is also identical
      // we can safely assume it is a duplicate that can be removed (ex. when a cue
      // "overlaps" VTT segments)

      removeDuplicateCuesFromTrack(this.subtitlesTrack_);
      this.handleAppendsDone_();
    };

    // noop as we shouldn't be getting video/audio data captions
    // that we do not support here.
    _proto.handleData_ = function handleData_() {
    };

    _proto.updateTimingInfoEnd_ = function updateTimingInfoEnd_() {// noop
    }
    /**
     * Uses the WebVTT parser to parse the segment response
     *
     * @param {Object} segmentInfo
     *        a segment info object that describes the current segment
     * @private
     */
    ;

    _proto.parseVTTCues_ = function parseVTTCues_(segmentInfo) {
      var decoder;
      var decodeBytesToString = false;

      if (typeof window.TextDecoder === 'function') {
        decoder = new window.TextDecoder('utf8');
      } else {
        // vtt.js's StringDecoder consumes strings, so the raw bytes must be
        // converted before being handed to the parser
        decoder = window.WebVTT.StringDecoder();
        decodeBytesToString = true;
      }

      var parser = new window.WebVTT.Parser(window, window.vttjs, decoder);
      segmentInfo.cues = [];
      segmentInfo.timestampmap = {
        MPEGTS: 0,
        LOCAL: 0
      };
      parser.oncue = segmentInfo.cues.push.bind(segmentInfo.cues);

      parser.ontimestampmap = function (map) {
        segmentInfo.timestampmap = map;
      };

      parser.onparsingerror = function (error) {
        videojs__default["default"].log.warn('Error encountered when parsing cues: ' + error.message);
      };

      if (segmentInfo.segment.map) {
        var mapData = segmentInfo.segment.map.bytes;

        if (decodeBytesToString) {
          mapData = uint8ToUtf8(mapData);
        }

        parser.parse(mapData);
      }

      var segmentData = segmentInfo.bytes;

      if (decodeBytesToString) {
        segmentData = uint8ToUtf8(segmentData);
      }

      parser.parse(segmentData);
      parser.flush();
    }
    /**
     * Updates the start and end times of any cues parsed by the WebVTT parser using
     * the information parsed from the X-TIMESTAMP-MAP header and a TS to media time mapping
     * from the SyncController
     *
     * @param {Object} segmentInfo
     *        a segment info object that describes the current segment
     * @param {Object} mappingObj
     *        object containing a mapping from TS to media time
     * @param {Object} playlist
     *        the playlist object containing the segment
     * @private
     */
    ;

    _proto.updateTimeMapping_ = function updateTimeMapping_(segmentInfo, mappingObj, playlist) {
      var segment = segmentInfo.segment;

      if (!mappingObj) {
        // If the sync controller does not have a mapping of TS to Media Time for the
        // timeline, then we don't have enough information to update the cue
        // start/end times
        return;
      }

      if (!segmentInfo.cues.length) {
        // If there are no cues, we also do not have enough information to figure out
        // segment timing. Mark that the segment contains no cues so we don't re-request
        // an empty segment.
        segment.empty = true;
        return;
      }

      var timestampmap = segmentInfo.timestampmap;
      // shift from the segment-local cue clock to player media time:
      // (MPEGTS expressed in seconds) - LOCAL + (TS-to-media-time mapping)
      var diff = timestampmap.MPEGTS / clock.ONE_SECOND_IN_TS - timestampmap.LOCAL + mappingObj.mapping;
      segmentInfo.cues.forEach(function (cue) {
        // First convert cue time to TS time using the timestamp-map provided within the vtt
        cue.startTime += diff;
        cue.endTime += diff;
      });

      if (!playlist.syncInfo) {
        var firstStart = segmentInfo.cues[0].startTime;
        var lastStart = segmentInfo.cues[segmentInfo.cues.length - 1].startTime;
        playlist.syncInfo = {
          mediaSequence: playlist.mediaSequence + segmentInfo.mediaIndex,
          time: Math.min(firstStart, lastStart - segment.duration)
        };
      }
    };

    return VTTSegmentLoader;
  }(SegmentLoader);
25754
25755 /**
25756 * @file ad-cue-tags.js
25757 */
25758
25759 /**
25760 * Searches for an ad cue that overlaps with the given mediaTime
25761 *
25762 * @param {Object} track
25763 * the track to find the cue for
25764 *
25765 * @param {number} mediaTime
25766 * the time to find the cue at
25767 *
25768 * @return {Object|null}
25769 * the found cue or null
25770 */
25771 var findAdCue = function findAdCue(track, mediaTime) {
25772 var cues = track.cues;
25773
25774 for (var i = 0; i < cues.length; i++) {
25775 var cue = cues[i];
25776
25777 if (mediaTime >= cue.adStartTime && mediaTime <= cue.adEndTime) {
25778 return cue;
25779 }
25780 }
25781
25782 return null;
25783 };
  /**
   * Creates, extends, and closes ad cues on the given track by walking the
   * playlist's segments and reacting to their EXT-X-CUE-OUT,
   * EXT-X-CUE-OUT-CONT, and EXT-X-CUE-IN tag data.
   *
   * @param {Object} media
   *        the media playlist whose segments are scanned
   * @param {Object} track
   *        the text track to add or update ad cues on
   * @param {number} [offset=0]
   *        media time corresponding to the playlist's first segment
   */
  var updateAdCues = function updateAdCues(media, track, offset) {
    if (offset === void 0) {
      offset = 0;
    }

    if (!media.segments) {
      return;
    }

    var mediaTime = offset;
    var cue;

    for (var i = 0; i < media.segments.length; i++) {
      var segment = media.segments[i];

      if (!cue) {
        // Since the cues will span for at least the segment duration, adding a fudge
        // factor of half segment duration will prevent duplicate cues from being
        // created when timing info is not exact (e.g. cue start time initialized
        // at 10.006677, but next call mediaTime is 10.003332 )
        cue = findAdCue(track, mediaTime + segment.duration / 2);
      }

      if (cue) {
        if ('cueIn' in segment) {
          // Found a CUE-IN so end the cue
          cue.endTime = mediaTime;
          cue.adEndTime = mediaTime;
          mediaTime += segment.duration;
          cue = null;
          continue;
        }

        if (mediaTime < cue.endTime) {
          // Already processed this mediaTime for this cue
          mediaTime += segment.duration;
          continue;
        } // otherwise extend cue until a CUE-IN is found


        cue.endTime += segment.duration;
      } else {
        if ('cueOut' in segment) {
          cue = new window.VTTCue(mediaTime, mediaTime + segment.duration, segment.cueOut);
          cue.adStartTime = mediaTime; // Assumes tag format to be
          // #EXT-X-CUE-OUT:30

          cue.adEndTime = mediaTime + parseFloat(segment.cueOut);
          track.addCue(cue);
        }

        if ('cueOutCont' in segment) {
          // Entered into the middle of an ad cue
          // Assumes tag format to be
          // #EXT-X-CUE-OUT-CONT:10/30
          var _segment$cueOutCont$s = segment.cueOutCont.split('/').map(parseFloat),
              adOffset = _segment$cueOutCont$s[0],
              adTotal = _segment$cueOutCont$s[1];

          cue = new window.VTTCue(mediaTime, mediaTime + segment.duration, '');
          cue.adStartTime = mediaTime - adOffset;
          cue.adEndTime = cue.adStartTime + adTotal;
          track.addCue(cue);
        }
      }

      // advance to the next segment's start time; note the two `continue`s
      // above perform this advance themselves before skipping this line
      mediaTime += segment.duration;
    }
  };
25853
  // Sync-point strategies used by SyncController below, e.g. to
  // synchronize expired playlist segments.
  // the max media sequence diff is 48 hours of live stream
  // content with two second segments. Anything larger than that
  // will likely be invalid.

  var MAX_MEDIA_SEQUENCE_DIFF_FOR_SYNC = 86400;
  var syncPointStrategies = [// Strategy "VOD": Handle the VOD-case where the sync-point is *always*
  // the equivalence display-time 0 === segment-index 0
  {
    name: 'VOD',
    run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
      if (duration !== Infinity) {
        var syncPoint = {
          time: 0,
          segmentIndex: 0,
          partIndex: null
        };
        return syncPoint;
      }

      return null;
    }
  }, // Strategy "ProgramDateTime": We have a program-date-time tag in this playlist
  {
    name: 'ProgramDateTime',
    run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
      if (!Object.keys(syncController.timelineToDatetimeMappings).length) {
        return null;
      }

      var syncPoint = null;
      var lastDistance = null;
      var partsAndSegments = getPartsAndSegments(playlist);
      currentTime = currentTime || 0;

      for (var i = 0; i < partsAndSegments.length; i++) {
        // start from the end and loop backwards for live
        // or start from the front and loop forwards for non-live
        var index = playlist.endList || currentTime === 0 ? i : partsAndSegments.length - (i + 1);
        var partAndSegment = partsAndSegments[index];
        var segment = partAndSegment.segment;
        var datetimeMapping = syncController.timelineToDatetimeMappings[segment.timeline];

        if (!datetimeMapping || !segment.dateTimeObject) {
          continue;
        }

        var segmentTime = segment.dateTimeObject.getTime() / 1000;
        var start = segmentTime + datetimeMapping; // take part duration into account.

        if (segment.parts && typeof partAndSegment.partIndex === 'number') {
          for (var z = 0; z < partAndSegment.partIndex; z++) {
            start += segment.parts[z].duration;
          }
        }

        var distance = Math.abs(currentTime - start); // Once the distance begins to increase, or if distance is 0, we have passed
        // currentTime and can stop looking for better candidates

        if (lastDistance !== null && (distance === 0 || lastDistance < distance)) {
          break;
        }

        lastDistance = distance;
        syncPoint = {
          time: start,
          segmentIndex: partAndSegment.segmentIndex,
          partIndex: partAndSegment.partIndex
        };
      }

      return syncPoint;
    }
  }, // Strategy "Segment": We have a known time mapping for a timeline and a
  // segment in the current timeline with timing data
  {
    name: 'Segment',
    run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
      var syncPoint = null;
      var lastDistance = null;
      currentTime = currentTime || 0;
      var partsAndSegments = getPartsAndSegments(playlist);

      for (var i = 0; i < partsAndSegments.length; i++) {
        // start from the end and loop backwards for live
        // or start from the front and loop forwards for non-live
        var index = playlist.endList || currentTime === 0 ? i : partsAndSegments.length - (i + 1);
        var partAndSegment = partsAndSegments[index];
        var segment = partAndSegment.segment;
        // prefer the part's start time when this entry represents a part
        var start = partAndSegment.part && partAndSegment.part.start || segment && segment.start;

        if (segment.timeline === currentTimeline && typeof start !== 'undefined') {
          var distance = Math.abs(currentTime - start); // Once the distance begins to increase, we have passed
          // currentTime and can stop looking for better candidates

          if (lastDistance !== null && lastDistance < distance) {
            break;
          }

          if (!syncPoint || lastDistance === null || lastDistance >= distance) {
            lastDistance = distance;
            syncPoint = {
              time: start,
              segmentIndex: partAndSegment.segmentIndex,
              partIndex: partAndSegment.partIndex
            };
          }
        }
      }

      return syncPoint;
    }
  }, // Strategy "Discontinuity": We have a discontinuity with a known
  // display-time
  {
    name: 'Discontinuity',
    run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
      var syncPoint = null;
      currentTime = currentTime || 0;

      if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
        var lastDistance = null;

        for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
          var segmentIndex = playlist.discontinuityStarts[i];
          var discontinuity = playlist.discontinuitySequence + i + 1;
          var discontinuitySync = syncController.discontinuities[discontinuity];

          if (discontinuitySync) {
            var distance = Math.abs(currentTime - discontinuitySync.time); // Once the distance begins to increase, we have passed
            // currentTime and can stop looking for better candidates

            if (lastDistance !== null && lastDistance < distance) {
              break;
            }

            if (!syncPoint || lastDistance === null || lastDistance >= distance) {
              lastDistance = distance;
              syncPoint = {
                time: discontinuitySync.time,
                segmentIndex: segmentIndex,
                partIndex: null
              };
            }
          }
        }
      }

      return syncPoint;
    }
  }, // Strategy "Playlist": We have a playlist with a known mapping of
  // segment index to display time
  {
    name: 'Playlist',
    run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
      if (playlist.syncInfo) {
        var syncPoint = {
          time: playlist.syncInfo.time,
          segmentIndex: playlist.syncInfo.mediaSequence - playlist.mediaSequence,
          partIndex: null
        };
        return syncPoint;
      }

      return null;
    }
  }];
26021
26022 var SyncController = /*#__PURE__*/function (_videojs$EventTarget) {
26023 inheritsLoose(SyncController, _videojs$EventTarget);
26024
    function SyncController(options) {
      var _this;

      // NOTE(review): `options` is accepted but never read here — presumably
      // kept for signature parity with the other controllers; confirm before
      // removing.
      _this = _videojs$EventTarget.call(this) || this; // ...for synching across variants

      // per-timeline mappings, indexed by timeline (see updateTimeMapping_'s
      // use of syncController_.timelines[timeline])
      _this.timelines = [];
      _this.discontinuities = [];
      _this.timelineToDatetimeMappings = {};
      _this.logger_ = logger('SyncController');
      return _this;
    }
26036 /**
26037 * Find a sync-point for the playlist specified
26038 *
26039 * A sync-point is defined as a known mapping from display-time to
26040 * a segment-index in the current playlist.
26041 *
26042 * @param {Playlist} playlist
26043 * The playlist that needs a sync-point
26044 * @param {number} duration
26045 * Duration of the MediaSource (Infinite if playing a live source)
26046 * @param {number} currentTimeline
26047 * The last timeline from which a segment was loaded
26048 * @return {Object}
26049 * A sync-point object
26050 */
26051
26052
26053 var _proto = SyncController.prototype;
26054
26055 _proto.getSyncPoint = function getSyncPoint(playlist, duration, currentTimeline, currentTime) {
26056 var syncPoints = this.runStrategies_(playlist, duration, currentTimeline, currentTime);
26057
26058 if (!syncPoints.length) {
26059 // Signal that we need to attempt to get a sync-point manually
26060 // by fetching a segment in the playlist and constructing
26061 // a sync-point from that information
26062 return null;
26063 } // Now find the sync-point that is closest to the currentTime because
26064 // that should result in the most accurate guess about which segment
26065 // to fetch
26066
26067
26068 return this.selectSyncPoint_(syncPoints, {
26069 key: 'time',
26070 value: currentTime
26071 });
26072 }
26073 /**
26074 * Calculate the amount of time that has expired off the playlist during playback
26075 *
26076 * @param {Playlist} playlist
26077 * Playlist object to calculate expired from
26078 * @param {number} duration
26079 * Duration of the MediaSource (Infinity if playling a live source)
26080 * @return {number|null}
26081 * The amount of time that has expired off the playlist during playback. Null
26082 * if no sync-points for the playlist can be found.
26083 */
26084 ;
26085
26086 _proto.getExpiredTime = function getExpiredTime(playlist, duration) {
26087 if (!playlist || !playlist.segments) {
26088 return null;
26089 }
26090
26091 var syncPoints = this.runStrategies_(playlist, duration, playlist.discontinuitySequence, 0); // Without sync-points, there is not enough information to determine the expired time
26092
26093 if (!syncPoints.length) {
26094 return null;
26095 }
26096
26097 var syncPoint = this.selectSyncPoint_(syncPoints, {
26098 key: 'segmentIndex',
26099 value: 0
26100 }); // If the sync-point is beyond the start of the playlist, we want to subtract the
26101 // duration from index 0 to syncPoint.segmentIndex instead of adding.
26102
26103 if (syncPoint.segmentIndex > 0) {
26104 syncPoint.time *= -1;
26105 }
26106
26107 return Math.abs(syncPoint.time + sumDurations({
26108 defaultDuration: playlist.targetDuration,
26109 durationList: playlist.segments,
26110 startIndex: syncPoint.segmentIndex,
26111 endIndex: 0
26112 }));
26113 }
26114 /**
26115 * Runs each sync-point strategy and returns a list of sync-points returned by the
26116 * strategies
26117 *
26118 * @private
26119 * @param {Playlist} playlist
26120 * The playlist that needs a sync-point
26121 * @param {number} duration
26122 * Duration of the MediaSource (Infinity if playing a live source)
26123 * @param {number} currentTimeline
26124 * The last timeline from which a segment was loaded
26125 * @return {Array}
26126 * A list of sync-point objects
26127 */
26128 ;
26129
26130 _proto.runStrategies_ = function runStrategies_(playlist, duration, currentTimeline, currentTime) {
26131 var syncPoints = []; // Try to find a sync-point in by utilizing various strategies...
26132
26133 for (var i = 0; i < syncPointStrategies.length; i++) {
26134 var strategy = syncPointStrategies[i];
26135 var syncPoint = strategy.run(this, playlist, duration, currentTimeline, currentTime);
26136
26137 if (syncPoint) {
26138 syncPoint.strategy = strategy.name;
26139 syncPoints.push({
26140 strategy: strategy.name,
26141 syncPoint: syncPoint
26142 });
26143 }
26144 }
26145
26146 return syncPoints;
26147 }
26148 /**
26149 * Selects the sync-point nearest the specified target
26150 *
26151 * @private
26152 * @param {Array} syncPoints
26153 * List of sync-points to select from
26154 * @param {Object} target
26155 * Object specifying the property and value we are targeting
26156 * @param {string} target.key
26157 * Specifies the property to target. Must be either 'time' or 'segmentIndex'
26158 * @param {number} target.value
26159 * The value to target for the specified key.
26160 * @return {Object}
26161 * The sync-point nearest the target
26162 */
26163 ;
26164
_proto.selectSyncPoint_ = function selectSyncPoint_(syncPoints, target) {
  // Start with the first candidate, then keep whichever sync-point lies
  // closest to the requested target value for target.key.
  var winner = syncPoints[0];
  var winnerDistance = Math.abs(winner.syncPoint[target.key] - target.value);

  syncPoints.slice(1).forEach(function (candidate) {
    var distance = Math.abs(candidate.syncPoint[target.key] - target.value);

    if (distance < winnerDistance) {
      winnerDistance = distance;
      winner = candidate;
    }
  });

  this.logger_("syncPoint for [" + target.key + ": " + target.value + "] chosen with strategy" + (" [" + winner.strategy + "]: [time:" + winner.syncPoint.time + ",") + (" segmentIndex:" + winner.syncPoint.segmentIndex) + (typeof winner.syncPoint.partIndex === 'number' ? ",partIndex:" + winner.syncPoint.partIndex : '') + ']');
  return winner.syncPoint;
}
26183 /**
26184 * Save any meta-data present on the segments when segments leave
26185 * the live window to the playlist to allow for synchronization at the
26186 * playlist level later.
26187 *
26188 * @param {Playlist} oldPlaylist - The previous active playlist
26189 * @param {Playlist} newPlaylist - The updated and most current playlist
26190 */
26191 ;
26192
_proto.saveExpiredSegmentInfo = function saveExpiredSegmentInfo(oldPlaylist, newPlaylist) {
  // Number of segments that left the live window between refreshes.
  var expiredCount = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;

  // Implausibly large jumps carry no useful sync information.
  if (expiredCount > MAX_MEDIA_SEQUENCE_DIFF_FOR_SYNC) {
    videojs__default["default"].log.warn("Not saving expired segment info. Media sequence gap " + expiredCount + " is too large.");
    return;
  }

  // Walk the expired segments newest-first and record the first one with a
  // known start time as a sync-point reference on the new playlist.
  var index = expiredCount - 1;

  while (index >= 0) {
    var expiredSegment = oldPlaylist.segments[index];

    if (expiredSegment && typeof expiredSegment.start !== 'undefined') {
      newPlaylist.syncInfo = {
        mediaSequence: oldPlaylist.mediaSequence + index,
        time: expiredSegment.start
      };
      this.logger_("playlist refresh sync: [time:" + newPlaylist.syncInfo.time + "," + (" mediaSequence: " + newPlaylist.syncInfo.mediaSequence + "]"));
      this.trigger('syncinfoupdate');
      break;
    }

    index--;
  }
}
26217 /**
26218 * Save the mapping from playlist's ProgramDateTime to display. This should only happen
26219 * before segments start to load.
26220 *
26221 * @param {Playlist} playlist - The currently active playlist
26222 */
26223 ;
26224
_proto.setDateTimeMappingForStart = function setDateTimeMappingForStart(playlist) {
  // Playlist refreshes before playback starts can cross a discontinuity;
  // clearing here prevents a stale mapping for an earlier timeline from
  // being retained.
  this.timelineToDatetimeMappings = {};
  var segments = playlist.segments;

  if (segments && segments.length && segments[0].dateTimeObject) {
    var first = segments[0];
    var epochSeconds = first.dateTimeObject.getTime() / 1000;
    this.timelineToDatetimeMappings[first.timeline] = -epochSeconds;
  }
}
26238 /**
26239 * Calculates and saves timeline mappings, playlist sync info, and segment timing values
26240 * based on the latest timing information.
26241 *
26242 * @param {Object} options
26243 * Options object
26244 * @param {SegmentInfo} options.segmentInfo
26245 * The current active request information
26246 * @param {boolean} options.shouldSaveTimelineMapping
26247 * If there's a timeline change, determines if the timeline mapping should be
26248 * saved for timeline mapping and program date time mappings.
26249 */
26250 ;
26251
_proto.saveSegmentTimingInfo = function saveSegmentTimingInfo(_ref) {
  var segmentInfo = _ref.segmentInfo,
      shouldSaveTimelineMapping = _ref.shouldSaveTimelineMapping;
  // Try to derive display-time values for the segment from the latest
  // timing information.
  var mapped = this.calculateSegmentTimeMapping_(segmentInfo, segmentInfo.timingInfo, shouldSaveTimelineMapping);
  var segment = segmentInfo.segment;

  if (mapped) {
    this.saveDiscontinuitySyncInfo_(segmentInfo);

    // Seed the playlist's sync info from this segment if none exists yet.
    var playlist = segmentInfo.playlist;

    if (!playlist.syncInfo) {
      playlist.syncInfo = {
        mediaSequence: playlist.mediaSequence + segmentInfo.mediaIndex,
        time: segment.start
      };
    }
  }

  var dateTime = segment.dateTimeObject;

  // Discontinuity segments carrying a program date time start a new
  // timeline-to-datetime mapping.
  if (segment.discontinuity && shouldSaveTimelineMapping && dateTime) {
    this.timelineToDatetimeMappings[segment.timeline] = -(dateTime.getTime() / 1000);
  }
};
26276
_proto.timestampOffsetForTimeline = function timestampOffsetForTimeline(timeline) {
  // Unknown timelines have no recorded offset.
  var entry = this.timelines[timeline];
  return typeof entry === 'undefined' ? null : entry.time;
};
26284
_proto.mappingForTimeline = function mappingForTimeline(timeline) {
  // Unknown timelines have no recorded media-to-display mapping.
  var entry = this.timelines[timeline];
  return typeof entry === 'undefined' ? null : entry.mapping;
}
26292 /**
26293 * Use the "media time" for a segment to generate a mapping to "display time" and
26294 * save that display time to the segment.
26295 *
26296 * @private
26297 * @param {SegmentInfo} segmentInfo
26298 * The current active request information
26299 * @param {Object} timingInfo
26300 * The start and end time of the current segment in "media time"
26301 * @param {boolean} shouldSaveTimelineMapping
26302 * If there's a timeline change, determines if the timeline mapping should be
26303 * saved in timelines.
26304 * @return {boolean}
26305 * Returns false if segment time mapping could not be calculated
26306 */
26307 ;
26308
_proto.calculateSegmentTimeMapping_ = function calculateSegmentTimeMapping_(segmentInfo, timingInfo, shouldSaveTimelineMapping) {
  // TODO: remove side effects
  var segment = segmentInfo.segment;
  var part = segmentInfo.part; // set only for partial (part-level) requests
  var mappingObj = this.timelines[segmentInfo.timeline];
  var start;
  var end;

  if (typeof segmentInfo.timestampOffset === 'number') {
    // A numeric timestampOffset marks a timeline change: build a fresh
    // media-time -> display-time mapping anchored at startOfSegment.
    mappingObj = {
      time: segmentInfo.startOfSegment,
      mapping: segmentInfo.startOfSegment - timingInfo.start
    };

    if (shouldSaveTimelineMapping) {
      this.timelines[segmentInfo.timeline] = mappingObj;
      this.trigger('timestampoffset');
      this.logger_("time mapping for timeline " + segmentInfo.timeline + ": " + ("[time: " + mappingObj.time + "] [mapping: " + mappingObj.mapping + "]"));
    }

    start = segmentInfo.startOfSegment;
    end = timingInfo.end + mappingObj.mapping;
  } else if (mappingObj) {
    // Reuse the mapping previously saved for this timeline.
    start = timingInfo.start + mappingObj.mapping;
    end = timingInfo.end + mappingObj.mapping;
  } else {
    // No mapping known for this timeline yet; nothing can be calculated.
    return false;
  }

  if (part) {
    part.start = start;
    part.end = end;
  } // If we don't have a segment start yet or the start value we got
  // is less than our current segment.start value, save a new start value.
  // We have to do this because parts will have segment timing info saved
  // multiple times and we want segment start to be the earliest part start
  // value for that segment.


  if (!segment.start || start < segment.start) {
    segment.start = start;
  }

  segment.end = end;
  return true;
}
26355 /**
26356 * Each time we have discontinuity in the playlist, attempt to calculate the location
26357 * in display of the start of the discontinuity and save that. We also save an accuracy
26358 * value so that we save values with the most accuracy (closest to 0.)
26359 *
26360 * @private
26361 * @param {SegmentInfo} segmentInfo - The current active request information
26362 */
26363 ;
26364
_proto.saveDiscontinuitySyncInfo_ = function saveDiscontinuitySyncInfo_(segmentInfo) {
  var playlist = segmentInfo.playlist;
  var segment = segmentInfo.segment; // If the current segment is a discontinuity then we know exactly where
  // the start of the range and it's accuracy is 0 (greater accuracy values
  // mean more approximation)

  if (segment.discontinuity) {
    this.discontinuities[segment.timeline] = {
      time: segment.start,
      accuracy: 0
    };
  } else if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
    // Search for future discontinuities that we can provide better timing
    // information for and save that information for sync purposes
    for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
      var segmentIndex = playlist.discontinuityStarts[i];
      var discontinuity = playlist.discontinuitySequence + i + 1;
      var mediaIndexDiff = segmentIndex - segmentInfo.mediaIndex;
      // accuracy = how many segments away the estimate is; smaller is better.
      var accuracy = Math.abs(mediaIndexDiff);

      // Only overwrite an existing estimate when this one is closer to the
      // discontinuity.
      if (!this.discontinuities[discontinuity] || this.discontinuities[discontinuity].accuracy > accuracy) {
        var time = void 0;

        if (mediaIndexDiff < 0) {
          // Discontinuity is before the current segment: walk backwards by
          // subtracting the summed durations between them.
          time = segment.start - sumDurations({
            defaultDuration: playlist.targetDuration,
            durationList: playlist.segments,
            startIndex: segmentInfo.mediaIndex,
            endIndex: segmentIndex
          });
        } else {
          // Discontinuity is after the current segment: walk forwards from
          // the current segment's end.
          time = segment.end + sumDurations({
            defaultDuration: playlist.targetDuration,
            durationList: playlist.segments,
            startIndex: segmentInfo.mediaIndex + 1,
            endIndex: segmentIndex
          });
        }

        this.discontinuities[discontinuity] = {
          time: time,
          accuracy: accuracy
        };
      }
    }
  }
};
26412
_proto.dispose = function dispose() {
  // Notify listeners before removing them all.
  this.trigger('dispose');
  this.off();
};
26417
26418 return SyncController;
26419 }(videojs__default["default"].EventTarget);
26420
26421 /**
26422 * The TimelineChangeController acts as a source for segment loaders to listen for and
26423 * keep track of latest and pending timeline changes. This is useful to ensure proper
26424 * sync, as each loader may need to make a consideration for what timeline the other
26425 * loader is on before making changes which could impact the other loader's media.
26426 *
26427 * @class TimelineChangeController
26428 * @extends videojs.EventTarget
26429 */
26430
var TimelineChangeController = /*#__PURE__*/function (_videojs$EventTarget) {
  inheritsLoose(TimelineChangeController, _videojs$EventTarget);

  /**
   * Tracks pending and most recent timeline changes per loader type so that
   * segment loaders can coordinate before crossing timelines.
   */
  function TimelineChangeController() {
    var that = _videojs$EventTarget.call(this) || this;

    that.pendingTimelineChanges_ = {};
    that.lastTimelineChanges_ = {};
    return that;
  }

  var _proto = TimelineChangeController.prototype;

  // Drop any pending change for the given loader type and notify listeners.
  _proto.clearPendingTimelineChange = function clearPendingTimelineChange(type) {
    this.pendingTimelineChanges_[type] = null;
    this.trigger('pendingtimelinechange');
  };

  // Record a pending change (only when both `from` and `to` are numbers) and
  // return the pending entry for the type.
  _proto.pendingTimelineChange = function pendingTimelineChange(change) {
    var type = change.type;
    var from = change.from;
    var to = change.to;

    if (typeof from === 'number' && typeof to === 'number') {
      this.pendingTimelineChanges_[type] = {
        type: type,
        from: from,
        to: to
      };
      this.trigger('pendingtimelinechange');
    }

    return this.pendingTimelineChanges_[type];
  };

  // Record a completed change (only when both `from` and `to` are numbers),
  // clearing the matching pending entry, and return the last entry.
  _proto.lastTimelineChange = function lastTimelineChange(change) {
    var type = change.type;
    var from = change.from;
    var to = change.to;

    if (typeof from === 'number' && typeof to === 'number') {
      this.lastTimelineChanges_[type] = {
        type: type,
        from: from,
        to: to
      };
      delete this.pendingTimelineChanges_[type];
      this.trigger('timelinechange');
    }

    return this.lastTimelineChanges_[type];
  };

  _proto.dispose = function dispose() {
    this.trigger('dispose');
    this.pendingTimelineChanges_ = {};
    this.lastTimelineChanges_ = {};
    this.off();
  };

  return TimelineChangeController;
}(videojs__default["default"].EventTarget);
26494
26495 /* rollup-plugin-worker-factory start for worker!/Users/bclifford/Code/vhs-release-test/src/decrypter-worker.js */
26496 var workerCode = transform(getWorkerString(function () {
26497
// Resolve the host global object, preferring modern globalThis and falling
// back through browser, Node, and worker globals to an empty stub.
var commonjsGlobal;

if (typeof globalThis !== 'undefined') {
  commonjsGlobal = globalThis;
} else if (typeof window !== 'undefined') {
  commonjsGlobal = window;
} else if (typeof global !== 'undefined') {
  commonjsGlobal = global;
} else if (typeof self !== 'undefined') {
  commonjsGlobal = self;
} else {
  commonjsGlobal = {};
}
26499
// Build a synthetic CommonJS module record, run the factory against it, and
// hand back whatever ended up on module.exports.
function createCommonjsModule(fn, basedir, module) {
  module = {
    path: basedir,
    exports: {},
    require: function require(path, base) {
      // Fall back to the module's own path when no base is supplied.
      var resolvedBase = base === undefined || base === null ? module.path : base;
      return commonjsRequire(path, resolvedBase);
    }
  };
  fn(module, module.exports);
  return module.exports;
}
26509
// Rollup's commonjs plugin cannot resolve dynamic require calls at build
// time, so any runtime call is a hard error.
function commonjsRequire() {
  var message = 'Dynamic requires are not currently supported by @rollup/plugin-commonjs';
  throw new Error(message);
}
26513
var createClass = createCommonjsModule(function (module) {
  // Babel helper: install descriptor lists onto a constructor and/or its
  // prototype, then return the constructor.
  function _defineProperties(target, props) {
    for (var i = 0; i < props.length; i++) {
      var spec = props[i];

      // Descriptors default to non-enumerable but configurable; data
      // properties additionally become writable.
      spec.enumerable = spec.enumerable || false;
      spec.configurable = true;

      if ("value" in spec) {
        spec.writable = true;
      }

      Object.defineProperty(target, spec.key, spec);
    }
  }

  function _createClass(Constructor, protoProps, staticProps) {
    if (protoProps) {
      _defineProperties(Constructor.prototype, protoProps);
    }

    if (staticProps) {
      _defineProperties(Constructor, staticProps);
    }

    return Constructor;
  }

  module.exports = _createClass;
  module.exports["default"] = module.exports;
  module.exports.__esModule = true;
});
var setPrototypeOf = createCommonjsModule(function (module) {
  // Babel helper: set an object's [[Prototype]], using the native
  // Object.setPrototypeOf when present and a __proto__ assignment otherwise.
  function _setPrototypeOf(o, p) {
    // The helper replaces itself (and its export) with the resolved
    // implementation on first call, so later calls skip feature detection.
    module.exports = _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) {
      o.__proto__ = p;
      return o;
    };

    module.exports["default"] = module.exports, module.exports.__esModule = true;
    return _setPrototypeOf(o, p);
  }

  module.exports = _setPrototypeOf;
  module.exports["default"] = module.exports, module.exports.__esModule = true;
});
var inheritsLoose = createCommonjsModule(function (module) {
  // Babel helper: loose-mode inheritance — chain the prototypes and link the
  // constructors statically via setPrototypeOf.
  function _inheritsLoose(subClass, superClass) {
    subClass.prototype = Object.create(superClass.prototype);
    subClass.prototype.constructor = subClass;
    setPrototypeOf(subClass, superClass);
  }

  module.exports = _inheritsLoose;
  module.exports["default"] = module.exports;
  module.exports.__esModule = true;
});
26558 /**
26559 * @file stream.js
26560 */
26561
26562 /**
26563 * A lightweight readable stream implemention that handles event dispatching.
26564 *
26565 * @class Stream
26566 */
26567
var Stream = /*#__PURE__*/function () {
  /**
   * A lightweight readable stream implementation that handles event
   * dispatching.
   */
  function Stream() {
    this.listeners = {};
  }
  /**
   * Add a listener for a specified event type.
   *
   * @param {string} type the event name
   * @param {Function} listener the callback to be invoked when an event of
   * the specified type occurs
   */


  var _proto = Stream.prototype;

  _proto.on = function on(type, listener) {
    if (!this.listeners[type]) {
      this.listeners[type] = [];
    }

    this.listeners[type].push(listener);
  }
  /**
   * Remove a listener for a specified event type.
   *
   * @param {string} type the event name
   * @param {Function} listener a function previously registered for this
   * type of event through `on`
   * @return {boolean} if we could turn it off or not
   */
  ;

  _proto.off = function off(type, listener) {
    if (!this.listeners[type]) {
      return false;
    }

    var index = this.listeners[type].indexOf(listener); // Copy before mutating so that a trigger() currently looping over the
    // old array reference keeps its order while we modify the copy.

    this.listeners[type] = this.listeners[type].slice(0); // Bug fix: only splice when the listener was actually registered.
    // Previously splice(-1, 1) ran for unknown listeners, which removed the
    // most recently added listener instead of removing nothing.

    if (index > -1) {
      this.listeners[type].splice(index, 1);
    }

    return index > -1;
  }
  /**
   * Trigger an event of the specified type on this stream. Any additional
   * arguments to this function are passed as parameters to event listeners.
   *
   * @param {string} type the event name
   */
  ;

  _proto.trigger = function trigger(type) {
    var callbacks = this.listeners[type];

    if (!callbacks) {
      return;
    } // Slicing the arguments on every invocation of this method
    // can add a significant amount of overhead. Avoid the
    // intermediate object creation for the common case of a
    // single callback argument


    if (arguments.length === 2) {
      var length = callbacks.length;

      for (var i = 0; i < length; ++i) {
        callbacks[i].call(this, arguments[1]);
      }
    } else {
      var args = Array.prototype.slice.call(arguments, 1);
      var _length = callbacks.length;

      for (var _i = 0; _i < _length; ++_i) {
        callbacks[_i].apply(this, args);
      }
    }
  }
  /**
   * Destroys the stream and cleans up.
   */
  ;

  _proto.dispose = function dispose() {
    this.listeners = {};
  }
  /**
   * Forwards all `data` events on this stream to the destination stream. The
   * destination stream should provide a method `push` to receive the data
   * events as they arrive.
   *
   * @param {Stream} destination the stream that will receive all `data` events
   * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
   */
  ;

  _proto.pipe = function pipe(destination) {
    this.on('data', function (data) {
      destination.push(data);
    });
  };

  return Stream;
}();
26678 /*! @name pkcs7 @version 1.0.4 @license Apache-2.0 */
26679
26680 /**
26681 * Returns the subarray of a Uint8Array without PKCS#7 padding.
26682 *
26683 * @param padded {Uint8Array} unencrypted bytes that have been padded
26684 * @return {Uint8Array} the unpadded bytes
26685 * @see http://tools.ietf.org/html/rfc5652
26686 */
26687
26688
// Strip PKCS#7 padding: the final byte records how many padding bytes were
// appended, so drop that many bytes from the end.
function unpad(padded) {
  var padCount = padded[padded.byteLength - 1];
  return padded.subarray(0, padded.byteLength - padCount);
}
26692 /*! @name aes-decrypter @version 3.1.3 @license Apache-2.0 */
26693
26694 /**
26695 * @file aes.js
26696 *
26697 * This file contains an adaptation of the AES decryption algorithm
26698 * from the Standford Javascript Cryptography Library. That work is
26699 * covered by the following copyright and permissions notice:
26700 *
26701 * Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
26702 * All rights reserved.
26703 *
26704 * Redistribution and use in source and binary forms, with or without
26705 * modification, are permitted provided that the following conditions are
26706 * met:
26707 *
26708 * 1. Redistributions of source code must retain the above copyright
26709 * notice, this list of conditions and the following disclaimer.
26710 *
26711 * 2. Redistributions in binary form must reproduce the above
26712 * copyright notice, this list of conditions and the following
26713 * disclaimer in the documentation and/or other materials provided
26714 * with the distribution.
26715 *
26716 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
26717 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
26718 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
26719 * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE
26720 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
26721 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
26722 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
26723 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
26724 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
26725 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26726 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26727 *
26728 * The views and conclusions contained in the software and documentation
26729 * are those of the authors and should not be interpreted as representing
26730 * official policies, either expressed or implied, of the authors.
26731 */
26732
26733 /**
26734 * Expand the S-box tables.
26735 *
26736 * @private
26737 */
26738
26739
var precompute = function precompute() {
  // tables[0] holds the encryption tables, tables[1] the decryption tables;
  // index 4 of each is the (inverse) S-box itself, indices 0-3 the combined
  // round tables.
  var tables = [[[], [], [], [], []], [[], [], [], [], []]];
  var encTable = tables[0];
  var decTable = tables[1];
  var sbox = encTable[4];
  var sboxInv = decTable[4];
  var i;
  var x;
  var xInv;
  var d = [];
  var th = [];
  var x2;
  var x4;
  var x8;
  var s;
  var tEnc;
  var tDec; // Compute double and third tables

  for (i = 0; i < 256; i++) {
    // d[i] doubles i in GF(2^8) (reduction polynomial 0x11b = 283);
    // th maps (2*i ^ i) = 3*i back to i.
    th[(d[i] = i << 1 ^ (i >> 7) * 283) ^ i] = i;
  }

  for (x = xInv = 0; !sbox[x]; x ^= x2 || 1, xInv = th[xInv] || 1) {
    // Compute sbox
    s = xInv ^ xInv << 1 ^ xInv << 2 ^ xInv << 3 ^ xInv << 4;
    s = s >> 8 ^ s & 255 ^ 99;
    sbox[x] = s;
    sboxInv[s] = x; // Compute MixColumns

    x8 = d[x4 = d[x2 = d[x]]];
    tDec = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
    tEnc = d[s] * 0x101 ^ s * 0x1010100;

    for (i = 0; i < 4; i++) {
      // Each successive table is the previous one rotated by one byte.
      encTable[i][x] = tEnc = tEnc << 24 ^ tEnc >>> 8;
      decTable[i][s] = tDec = tDec << 24 ^ tDec >>> 8;
    }
  } // Compactify. Considerable speedup on Firefox.


  for (i = 0; i < 5; i++) {
    encTable[i] = encTable[i].slice(0);
    decTable[i] = decTable[i].slice(0);
  }

  return tables;
};
26787
26788 var aesTables = null;
26789 /**
26790 * Schedule out an AES key for both encryption and decryption. This
26791 * is a low-level class. Use a cipher mode to do bulk encryption.
26792 *
26793 * @class AES
26794 * @param key {Array} The key as an array of 4, 6 or 8 words.
26795 */
26796
var AES = /*#__PURE__*/function () {
  // Key-schedule and decryption adapted from SJCL (see license block above).
  function AES(key) {
    /**
     * The expanded S-box and inverse S-box tables. These will be computed
     * on the client so that we don't have to send them down the wire.
     *
     * There are two tables, _tables[0] is for encryption and
     * _tables[1] is for decryption.
     *
     * The first 4 sub-tables are the expanded S-box with MixColumns. The
     * last (_tables[01][4]) is the S-box itself.
     *
     * @private
     */
    // if we have yet to precompute the S-box tables
    // do so now
    if (!aesTables) {
      aesTables = precompute();
    } // then make a copy of that object for use


    this._tables = [[aesTables[0][0].slice(), aesTables[0][1].slice(), aesTables[0][2].slice(), aesTables[0][3].slice(), aesTables[0][4].slice()], [aesTables[1][0].slice(), aesTables[1][1].slice(), aesTables[1][2].slice(), aesTables[1][3].slice(), aesTables[1][4].slice()]];
    var i;
    var j;
    var tmp;
    var sbox = this._tables[0][4];
    var decTable = this._tables[1];
    var keyLen = key.length;
    var rcon = 1;

    // AES-128/192/256 keys arrive as 4, 6 or 8 32-bit words.
    if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
      throw new Error('Invalid aes key size');
    }

    var encKey = key.slice(0);
    var decKey = [];
    this._key = [encKey, decKey]; // schedule encryption keys

    for (i = keyLen; i < 4 * keyLen + 28; i++) {
      tmp = encKey[i - 1]; // apply sbox

      if (i % keyLen === 0 || keyLen === 8 && i % keyLen === 4) {
        tmp = sbox[tmp >>> 24] << 24 ^ sbox[tmp >> 16 & 255] << 16 ^ sbox[tmp >> 8 & 255] << 8 ^ sbox[tmp & 255]; // shift rows and add rcon

        if (i % keyLen === 0) {
          tmp = tmp << 8 ^ tmp >>> 24 ^ rcon << 24;
          rcon = rcon << 1 ^ (rcon >> 7) * 283;
        }
      }

      encKey[i] = encKey[i - keyLen] ^ tmp;
    } // schedule decryption keys


    for (j = 0; i; j++, i--) {
      // Walk the encryption schedule backwards, four words per round.
      tmp = encKey[j & 3 ? i : i - 4];

      if (i <= 4 || j < 4) {
        // First and last round keys are copied verbatim.
        decKey[j] = tmp;
      } else {
        // Middle round keys are run through the decryption tables
        // (S-box followed by the combined inverse round tables).
        decKey[j] = decTable[0][sbox[tmp >>> 24]] ^ decTable[1][sbox[tmp >> 16 & 255]] ^ decTable[2][sbox[tmp >> 8 & 255]] ^ decTable[3][sbox[tmp & 255]];
      }
    }
  }
  /**
   * Decrypt 16 bytes, specified as four 32-bit words.
   *
   * @param {number} encrypted0 the first word to decrypt
   * @param {number} encrypted1 the second word to decrypt
   * @param {number} encrypted2 the third word to decrypt
   * @param {number} encrypted3 the fourth word to decrypt
   * @param {Int32Array} out the array to write the decrypted words
   * into
   * @param {number} offset the offset into the output array to start
   * writing results
   * @return {Array} The plaintext.
   */


  var _proto = AES.prototype;

  _proto.decrypt = function decrypt(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {
    var key = this._key[1]; // state variables a,b,c,d are loaded with pre-whitened data

    var a = encrypted0 ^ key[0];
    var b = encrypted3 ^ key[1];
    var c = encrypted2 ^ key[2];
    var d = encrypted1 ^ key[3];
    var a2;
    var b2;
    var c2; // key.length === 2 ?

    // Round count follows from the schedule length (10/12/14 total rounds).
    var nInnerRounds = key.length / 4 - 2;
    var i;
    var kIndex = 4;
    var table = this._tables[1]; // load up the tables

    var table0 = table[0];
    var table1 = table[1];
    var table2 = table[2];
    var table3 = table[3];
    var sbox = table[4]; // Inner rounds. Cribbed from OpenSSL.

    for (i = 0; i < nInnerRounds; i++) {
      a2 = table0[a >>> 24] ^ table1[b >> 16 & 255] ^ table2[c >> 8 & 255] ^ table3[d & 255] ^ key[kIndex];
      b2 = table0[b >>> 24] ^ table1[c >> 16 & 255] ^ table2[d >> 8 & 255] ^ table3[a & 255] ^ key[kIndex + 1];
      c2 = table0[c >>> 24] ^ table1[d >> 16 & 255] ^ table2[a >> 8 & 255] ^ table3[b & 255] ^ key[kIndex + 2];
      d = table0[d >>> 24] ^ table1[a >> 16 & 255] ^ table2[b >> 8 & 255] ^ table3[c & 255] ^ key[kIndex + 3];
      kIndex += 4;
      a = a2;
      b = b2;
      c = c2;
    } // Last round.


    for (i = 0; i < 4; i++) {
      // The final round uses the inverse S-box alone (no combined tables)
      // and writes the output words at positions offset+3, +2, +1, +0.
      out[(3 & -i) + offset] = sbox[a >>> 24] << 24 ^ sbox[b >> 16 & 255] << 16 ^ sbox[c >> 8 & 255] << 8 ^ sbox[d & 255] ^ key[kIndex++];
      a2 = a;
      a = b;
      b = c;
      c = d;
      d = a2;
    }
  };

  return AES;
}();
26924 /**
26925 * A wrapper around the Stream class to use setTimeout
26926 * and run stream "jobs" Asynchronously
26927 *
26928 * @class AsyncStream
26929 * @extends Stream
26930 */
26931
26932
var AsyncStream = /*#__PURE__*/function (_Stream) {
  inheritsLoose(AsyncStream, _Stream);

  /**
   * A wrapper around the Stream class that runs queued "jobs" asynchronously
   * via setTimeout.
   */
  function AsyncStream() {
    var that = _Stream.call(this, Stream) || this;

    that.jobs = [];
    that.delay = 1;
    that.timeout_ = null;
    return that;
  }
  /**
   * process an async job
   *
   * @private
   */


  var _proto = AsyncStream.prototype;

  _proto.processJob_ = function processJob_() {
    // Run the oldest queued job, then schedule the next one (if any).
    var job = this.jobs.shift();
    job();

    if (this.jobs.length === 0) {
      this.timeout_ = null;
    } else {
      this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
    }
  }
  /**
   * push a job into the stream
   *
   * @param {Function} job the job to push into the stream
   */
  ;

  _proto.push = function push(job) {
    this.jobs.push(job);

    // Kick off processing unless a run is already scheduled.
    if (!this.timeout_) {
      this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
    }
  };

  return AsyncStream;
}(Stream);
26980 /**
26981 * Convert network-order (big-endian) bytes into their little-endian
26982 * representation.
26983 */
26984
26985
var ntoh = function ntoh(word) {
  // Swap the four bytes of a 32-bit word (big-endian <-> little-endian).
  var lo = word & 0xff;
  var midLo = word >> 8 & 0xff;
  var midHi = word >> 16 & 0xff;
  var hi = word >>> 24;
  return lo << 24 | midLo << 16 | midHi << 8 | hi;
};
26989 /**
26990 * Decrypt bytes using AES-128 with CBC and PKCS#7 padding.
26991 *
26992 * @param {Uint8Array} encrypted the encrypted bytes
26993 * @param {Uint32Array} key the bytes of the decryption key
26994 * @param {Uint32Array} initVector the initialization vector (IV) to
26995 * use for the first round of CBC.
26996 * @return {Uint8Array} the decrypted bytes
26997 *
26998 * @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
26999 * @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29
27000 * @see https://tools.ietf.org/html/rfc2315
27001 */
27002
27003
// AES-128 CBC decryption of a whole buffer; IV words are consumed in
// little-endian (post-ntoh) form.
var decrypt = function decrypt(encrypted, key, initVector) {
  // word-level access to the encrypted bytes
  var encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2);
  var decipher = new AES(Array.prototype.slice.call(key)); // byte and word-level access for the decrypted output

  var decrypted = new Uint8Array(encrypted.byteLength);
  var decrypted32 = new Int32Array(decrypted.buffer); // temporary variables for working with the IV, encrypted, and
  // decrypted data

  var init0;
  var init1;
  var init2;
  var init3;
  var encrypted0;
  var encrypted1;
  var encrypted2;
  var encrypted3; // iteration variable

  var wordIx; // pull out the words of the IV to ensure we don't modify the
  // passed-in reference and easier access

  init0 = initVector[0];
  init1 = initVector[1];
  init2 = initVector[2];
  init3 = initVector[3]; // decrypt four word sequences, applying cipher-block chaining (CBC)
  // to each decrypted block

  for (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {
    // convert big-endian (network order) words into little-endian
    // (javascript order)
    encrypted0 = ntoh(encrypted32[wordIx]);
    encrypted1 = ntoh(encrypted32[wordIx + 1]);
    encrypted2 = ntoh(encrypted32[wordIx + 2]);
    encrypted3 = ntoh(encrypted32[wordIx + 3]); // decrypt the block

    decipher.decrypt(encrypted0, encrypted1, encrypted2, encrypted3, decrypted32, wordIx); // XOR with the IV, and restore network byte-order to obtain the
    // plaintext

    decrypted32[wordIx] = ntoh(decrypted32[wordIx] ^ init0);
    decrypted32[wordIx + 1] = ntoh(decrypted32[wordIx + 1] ^ init1);
    decrypted32[wordIx + 2] = ntoh(decrypted32[wordIx + 2] ^ init2);
    decrypted32[wordIx + 3] = ntoh(decrypted32[wordIx + 3] ^ init3); // setup the IV for the next round

    init0 = encrypted0;
    init1 = encrypted1;
    init2 = encrypted2;
    init3 = encrypted3;
  }

  return decrypted;
};
27055 /**
27056 * The `Decrypter` class that manages decryption of AES
27057 * data through `AsyncStream` objects and the `decrypt`
27058 * function
27059 *
27060 * @param {Uint8Array} encrypted the encrypted bytes
27061 * @param {Uint32Array} key the bytes of the decryption key
27062 * @param {Uint32Array} initVector the initialization vector (IV) to
27063 * @param {Function} done the function to run when done
27064 * @class Decrypter
27065 */
27066
27067
var Decrypter = /*#__PURE__*/function () {
  // Splits a large ciphertext into STEP-word chunks and queues each chunk's
  // decryption on an AsyncStream so the main thread is not blocked.
  function Decrypter(encrypted, key, initVector, done) {
    var step = Decrypter.STEP;
    var encrypted32 = new Int32Array(encrypted.buffer);
    var decrypted = new Uint8Array(encrypted.byteLength);
    var i = 0;
    this.asyncStream_ = new AsyncStream(); // split up the encryption job and do the individual chunks asynchronously

    this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));

    for (i = step; i < encrypted32.length; i += step) {
      // CBC chaining across chunks: each later chunk's IV is the last
      // ciphertext block (4 words) of the preceding chunk.
      initVector = new Uint32Array([ntoh(encrypted32[i - 4]), ntoh(encrypted32[i - 3]), ntoh(encrypted32[i - 2]), ntoh(encrypted32[i - 1])]);
      this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
    } // invoke the done() callback when everything is finished


    this.asyncStream_.push(function () {
      // remove pkcs#7 padding from the decrypted bytes
      done(null, unpad(decrypted));
    });
  }
  /**
   * a getter for step the maximum number of bytes to process at one time
   *
   * @return {number} the value of step 32000
   */


  var _proto = Decrypter.prototype;
  /**
   * @private
   */

  _proto.decryptChunk_ = function decryptChunk_(encrypted, key, initVector, decrypted) {
    // Returns a thunk so the actual decryption runs later on the AsyncStream.
    return function () {
      var bytes = decrypt(encrypted, key, initVector);
      decrypted.set(bytes, encrypted.byteOffset);
    };
  };

  createClass(Decrypter, null, [{
    key: "STEP",
    get: function get() {
      // 4 * 8000;
      return 32000;
    }
  }]);
  return Decrypter;
}();
27117
// Resolve the host global without assuming a particular runtime:
// browser `window`, bundler-provided `commonjsGlobal`, worker `self`,
// or a plain empty object when none of those exist.
var win = typeof window !== "undefined" ? window
  : typeof commonjsGlobal !== "undefined" ? commonjsGlobal
  : typeof self !== "undefined" ? self
  : {};

var window_1 = win;
27131
/**
 * Determine whether a value is a view over an ArrayBuffer (a TypedArray
 * or DataView).
 *
 * @param {*} obj value to test
 * @return {boolean} true when obj is an ArrayBuffer view
 */
var isArrayBufferView = function isArrayBufferView(obj) {
  // Fix: the original compared the function object itself to the string
  // 'function' (always false), so the native check was never taken.
  if (typeof ArrayBuffer.isView === 'function') {
    return ArrayBuffer.isView(obj);
  }

  // Fallback for ancient hosts; coerce to a real boolean so both branches
  // return the same type.
  return !!(obj && obj.buffer instanceof ArrayBuffer);
};
27139
// NOTE(review): residue of the bundled byte-helpers module. The powers-of-256
// table and the endianness probe below are evaluated but their results are
// discarded in this bundle — presumably the original module assigned them to
// exported constants that were tree-shaken away; verify against the source
// module before removing.
var BigInt = window_1.BigInt || Number;
[BigInt('0x1'), BigInt('0x100'), BigInt('0x10000'), BigInt('0x1000000'), BigInt('0x100000000'), BigInt('0x10000000000'), BigInt('0x1000000000000'), BigInt('0x100000000000000'), BigInt('0x10000000000000000')];

(function () {
  // Detect host byte order by viewing a 16-bit value as raw bytes.
  var a = new Uint16Array([0xFFCC]);
  var b = new Uint8Array(a.buffer, a.byteOffset, a.byteLength);

  if (b[0] === 0xFF) {
    return 'big';
  }

  if (b[0] === 0xCC) {
    return 'little';
  }

  return 'unknown';
})();
27157 /**
27158 * Creates an object for sending to a web worker modifying properties that are TypedArrays
27159 * into a new object with seperated properties for the buffer, byteOffset, and byteLength.
27160 *
27161 * @param {Object} message
27162 * Object of properties and values to send to the web worker
27163 * @return {Object}
27164 * Modified message with TypedArray values expanded
27165 * @function createTransferableMessage
27166 */
27167
27168
/**
 * Build a shallow copy of `message` in which every ArrayBuffer-view value
 * is replaced by a plain object carrying its underlying buffer plus
 * byteOffset/byteLength, so the buffers can be listed for transfer in
 * postMessage.
 *
 * @param {Object} message properties and values to send to the web worker
 * @return {Object} copy with TypedArray values expanded
 */
var createTransferableMessage = function createTransferableMessage(message) {
  var transferable = {};

  Object.keys(message).forEach(function (key) {
    var value = message[key];

    transferable[key] = isArrayBufferView(value) ? {
      bytes: value.buffer,
      byteOffset: value.byteOffset,
      byteLength: value.byteLength
    } : value;
  });

  return transferable;
};
27186 /* global self */
27187
27188 /**
27189 * Our web worker interface so that things can talk to aes-decrypter
27190 * that will be running in a web worker. the scope is passed to this by
27191 * webworkify.
27192 */
27193
27194
/**
 * Web-worker entry point: reconstructs the typed arrays from the
 * transferred buffer descriptors, runs the Decrypter, and posts the
 * result back, transferring the decrypted buffer instead of copying it.
 */
self.onmessage = function (event) {
  var data = event.data;
  // Rehydrate views over the transferred ArrayBuffers; key and IV are
  // 32-bit word views, hence the byteLength / 4 element counts.
  var encrypted = new Uint8Array(data.encrypted.bytes, data.encrypted.byteOffset, data.encrypted.byteLength);
  var key = new Uint32Array(data.key.bytes, data.key.byteOffset, data.key.byteLength / 4);
  var iv = new Uint32Array(data.iv.bytes, data.iv.byteOffset, data.iv.byteLength / 4);
  /* eslint-disable no-new, handle-callback-err */

  new Decrypter(encrypted, key, iv, function (err, bytes) {
    // `source` is echoed back so the caller can correlate the response.
    self.postMessage(createTransferableMessage({
      source: data.source,
      decrypted: bytes
    }), [bytes.buffer]);
  });
  /* eslint-enable */
};
27210 }));
27211 var Decrypter = factory(workerCode);
27212 /* rollup-plugin-worker-factory end for worker!/Users/bclifford/Code/vhs-release-test/src/decrypter-worker.js */
27213
27214 /**
27215 * Convert the properties of an HLS track into an audioTrackKind.
27216 *
27217 * @private
27218 */
27219
/**
 * Map HLS media-group properties to an audioTrackKind value.
 * Tracks whose characteristics include the describes-video accessibility
 * tag become 'main-desc'; otherwise default tracks are 'main' and the
 * rest are 'alternative'.
 *
 * @private
 */

var audioTrackKind_ = function audioTrackKind_(properties) {
  var describesVideo = properties.characteristics &&
    properties.characteristics.indexOf('public.accessibility.describes-video') >= 0;

  if (describesVideo) {
    return 'main-desc';
  }

  return properties.default ? 'main' : 'alternative';
};
27229 /**
27230 * Pause provided segment loader and playlist loader if active
27231 *
27232 * @param {SegmentLoader} segmentLoader
27233 * SegmentLoader to pause
27234 * @param {Object} mediaType
27235 * Active media type
27236 * @function stopLoaders
27237 */
27238
27239
/**
 * Abort and pause the given segment loader; if the media type currently
 * has an active playlist loader, pause it and clear the reference.
 *
 * @param {SegmentLoader} segmentLoader
 *        SegmentLoader to pause
 * @param {Object} mediaType
 *        Active media type
 * @function stopLoaders
 */

var stopLoaders = function stopLoaders(segmentLoader, mediaType) {
  segmentLoader.abort();
  segmentLoader.pause();

  var playlistLoader = mediaType && mediaType.activePlaylistLoader;

  if (playlistLoader) {
    playlistLoader.pause();
    mediaType.activePlaylistLoader = null;
  }
};
27249 /**
27250 * Start loading provided segment loader and playlist loader
27251 *
27252 * @param {PlaylistLoader} playlistLoader
27253 * PlaylistLoader to start loading
27254 * @param {Object} mediaType
27255 * Active media type
27256 * @function startLoaders
27257 */
27258
var startLoaders = function startLoaders(playlistLoader, mediaType) {
  // Record this loader as the active one for the media type so
  // stopLoaders/onGroupChanged can find and pause it later.
  // Segment loader will be started after `loadedmetadata` or `loadedplaylist` from the
  // playlist loader
  mediaType.activePlaylistLoader = playlistLoader;
  playlistLoader.load();
};
27265 /**
27266 * Returns a function to be called when the media group changes. It performs a
27267 * non-destructive (preserve the buffer) resync of the SegmentLoader. This is because a
27268 * change of group is merely a rendition switch of the same content at another encoding,
27269 * rather than a change of content, such as switching audio from English to Spanish.
27270 *
27271 * @param {string} type
27272 * MediaGroup type
27273 * @param {Object} settings
27274 * Object containing required information for media groups
27275 * @return {Function}
27276 * Handler for a non-destructive resync of SegmentLoader when the active media
27277 * group changes.
27278 * @function onGroupChanged
27279 */
27280
var onGroupChanged = function onGroupChanged(type, settings) {
  return function () {
    var _settings$segmentLoad = settings.segmentLoaders,
        segmentLoader = _settings$segmentLoad[type],
        mainSegmentLoader = _settings$segmentLoad.main,
        mediaType = settings.mediaTypes[type];
    var activeTrack = mediaType.activeTrack();
    var activeGroup = mediaType.getActiveGroup();
    var previousActiveLoader = mediaType.activePlaylistLoader;
    var lastGroup = mediaType.lastGroup_; // the group did not change do nothing

    if (activeGroup && lastGroup && activeGroup.id === lastGroup.id) {
      return;
    }

    // Cache the new group/track before stopping loaders so repeated events
    // for the same group are filtered out by the check above.
    mediaType.lastGroup_ = activeGroup;
    mediaType.lastTrack_ = activeTrack;
    stopLoaders(segmentLoader, mediaType);

    if (!activeGroup || activeGroup.isMasterPlaylist) {
      // there is no group active or active group is a main playlist and won't change
      return;
    }

    if (!activeGroup.playlistLoader) {
      if (previousActiveLoader) {
        // The previous group had a playlist loader but the new active group does not
        // this means we are switching from demuxed to muxed audio. In this case we want to
        // do a destructive reset of the main segment loader and not restart the audio
        // loaders.
        mainSegmentLoader.resetEverything();
      }

      return;
    } // Non-destructive resync


    segmentLoader.resyncLoader();
    startLoaders(activeGroup.playlistLoader, mediaType);
  };
};
/**
 * Returns a handler invoked while the active media group is in the middle
 * of changing: clears the cached group (so the eventual group-changed
 * handler treats it as a real change) and halts in-flight segment work.
 *
 * @param {string} type
 *        MediaGroup type
 * @param {Object} settings
 *        Object containing required information for media groups
 * @return {Function} group-changing handler
 */
var onGroupChanging = function onGroupChanging(type, settings) {
  return function () {
    var mediaType = settings.mediaTypes[type];
    var segmentLoader = settings.segmentLoaders[type];

    mediaType.lastGroup_ = null;
    segmentLoader.abort();
    segmentLoader.pause();
  };
};
27331 /**
27332 * Returns a function to be called when the media track changes. It performs a
27333 * destructive reset of the SegmentLoader to ensure we start loading as close to
27334 * currentTime as possible.
27335 *
27336 * @param {string} type
27337 * MediaGroup type
27338 * @param {Object} settings
27339 * Object containing required information for media groups
27340 * @return {Function}
27341 * Handler for a destructive reset of SegmentLoader when the active media
27342 * track changes.
27343 * @function onTrackChanged
27344 */
27345
var onTrackChanged = function onTrackChanged(type, settings) {
  return function () {
    var masterPlaylistLoader = settings.masterPlaylistLoader,
        _settings$segmentLoad2 = settings.segmentLoaders,
        segmentLoader = _settings$segmentLoad2[type],
        mainSegmentLoader = _settings$segmentLoad2.main,
        mediaType = settings.mediaTypes[type];
    var activeTrack = mediaType.activeTrack();
    var activeGroup = mediaType.getActiveGroup();
    var previousActiveLoader = mediaType.activePlaylistLoader;
    var lastTrack = mediaType.lastTrack_; // track did not change, do nothing

    if (lastTrack && activeTrack && lastTrack.id === activeTrack.id) {
      return;
    }

    // Record the new group/track, then stop whatever was loading for this
    // media type before deciding how to restart.
    mediaType.lastGroup_ = activeGroup;
    mediaType.lastTrack_ = activeTrack;
    stopLoaders(segmentLoader, mediaType);

    if (!activeGroup) {
      // there is no group active so we do not want to restart loaders
      return;
    }

    if (activeGroup.isMasterPlaylist) {
      // track did not change, do nothing
      if (!activeTrack || !lastTrack || activeTrack.id === lastTrack.id) {
        return;
      }

      // Audio groups that are themselves master playlists are switched via
      // a full rendition change on the master playlist controller.
      var mpc = settings.vhs.masterPlaylistController_;
      var newPlaylist = mpc.selectPlaylist(); // media will not change do nothing

      if (mpc.media() === newPlaylist) {
        return;
      }

      mediaType.logger_("track change. Switching master audio from " + lastTrack.id + " to " + activeTrack.id);
      masterPlaylistLoader.pause();
      mainSegmentLoader.resetEverything();
      mpc.fastQualityChange_(newPlaylist);
      return;
    }

    if (type === 'AUDIO') {
      if (!activeGroup.playlistLoader) {
        // when switching from demuxed audio/video to muxed audio/video (noted by no
        // playlist loader for the audio group), we want to do a destructive reset of the
        // main segment loader and not restart the audio loaders
        mainSegmentLoader.setAudio(true); // don't have to worry about disabling the audio of the audio segment loader since
        // it should be stopped

        mainSegmentLoader.resetEverything();
        return;
      } // although the segment loader is an audio segment loader, call the setAudio
      // function to ensure it is prepared to re-append the init segment (or handle other
      // config changes)


      segmentLoader.setAudio(true);
      mainSegmentLoader.setAudio(false);
    }

    if (previousActiveLoader === activeGroup.playlistLoader) {
      // Nothing has actually changed. This can happen because track change events can fire
      // multiple times for a "single" change. One for enabling the new active track, and
      // one for disabling the track that was active
      startLoaders(activeGroup.playlistLoader, mediaType);
      return;
    }

    if (segmentLoader.track) {
      // For WebVTT, set the new text track in the segmentloader
      segmentLoader.track(activeTrack);
    } // destructive reset


    segmentLoader.resetEverything();
    startLoaders(activeGroup.playlistLoader, mediaType);
  };
};
var onError = {
  /**
   * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
   * an error.
   *
   * @param {string} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @return {Function}
   *         Error handler. Logs warning (or error if the playlist is blacklisted) to
   *         console and switches back to default audio track.
   * @function onError.AUDIO
   */
  AUDIO: function AUDIO(type, settings) {
    return function () {
      var segmentLoader = settings.segmentLoaders[type],
          mediaType = settings.mediaTypes[type],
          blacklistCurrentPlaylist = settings.blacklistCurrentPlaylist;
      stopLoaders(segmentLoader, mediaType); // switch back to default audio track

      var activeTrack = mediaType.activeTrack();
      var activeGroup = mediaType.activeGroup();
      // prefer the group variant flagged as default; fall back to the first
      var id = (activeGroup.filter(function (group) {
        return group.default;
      })[0] || activeGroup[0]).id;
      var defaultTrack = mediaType.tracks[id];

      if (activeTrack === defaultTrack) {
        // Default track encountered an error. All we can do now is blacklist the current
        // rendition and hope another will switch audio groups
        blacklistCurrentPlaylist({
          message: 'Problem encountered loading the default audio track.'
        });
        return;
      }

      // Fix: the concatenated warning previously lacked a separating space
      // ("...track.Switching back...").
      videojs__default["default"].log.warn('Problem encountered loading the alternate audio track. ' + 'Switching back to default.');

      // enable only the default track, then notify listeners
      for (var trackId in mediaType.tracks) {
        mediaType.tracks[trackId].enabled = mediaType.tracks[trackId] === defaultTrack;
      }

      mediaType.onTrackChanged();
    };
  },

  /**
   * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
   * an error.
   *
   * @param {string} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @return {Function}
   *         Error handler. Logs warning to console and disables the active subtitle track
   * @function onError.SUBTITLES
   */
  SUBTITLES: function SUBTITLES(type, settings) {
    return function () {
      var segmentLoader = settings.segmentLoaders[type],
          mediaType = settings.mediaTypes[type];
      // Fix: the concatenated warning previously lacked a separating space
      // ("...track.Disabling...").
      videojs__default["default"].log.warn('Problem encountered loading the subtitle track. ' + 'Disabling subtitle track.');
      stopLoaders(segmentLoader, mediaType);
      var track = mediaType.activeTrack();

      if (track) {
        track.mode = 'disabled';
      }

      mediaType.onTrackChanged();
    };
  }
};
var setupListeners = {
  /**
   * Setup event listeners for audio playlist loader
   *
   * @param {string} type
   *        MediaGroup type
   * @param {PlaylistLoader|null} playlistLoader
   *        PlaylistLoader to register listeners on
   * @param {Object} settings
   *        Object containing required information for media groups
   * @function setupListeners.AUDIO
   */
  AUDIO: function AUDIO(type, playlistLoader, settings) {
    if (!playlistLoader) {
      // no playlist loader means audio will be muxed with the video
      return;
    }

    var tech = settings.tech,
        requestOptions = settings.requestOptions,
        segmentLoader = settings.segmentLoaders[type];
    playlistLoader.on('loadedmetadata', function () {
      var media = playlistLoader.media();
      segmentLoader.playlist(media, requestOptions); // if the video is already playing, or if this isn't a live video and preload
      // permits, start downloading segments

      if (!tech.paused() || media.endList && tech.preload() !== 'none') {
        segmentLoader.load();
      }
    });
    playlistLoader.on('loadedplaylist', function () {
      // keep the segment loader's playlist in sync with refreshes
      segmentLoader.playlist(playlistLoader.media(), requestOptions); // If the player isn't paused, ensure that the segment loader is running

      if (!tech.paused()) {
        segmentLoader.load();
      }
    });
    playlistLoader.on('error', onError[type](type, settings));
  },

  /**
   * Setup event listeners for subtitle playlist loader
   *
   * @param {string} type
   *        MediaGroup type
   * @param {PlaylistLoader|null} playlistLoader
   *        PlaylistLoader to register listeners on
   * @param {Object} settings
   *        Object containing required information for media groups
   * @function setupListeners.SUBTITLES
   */
  SUBTITLES: function SUBTITLES(type, playlistLoader, settings) {
    var tech = settings.tech,
        requestOptions = settings.requestOptions,
        segmentLoader = settings.segmentLoaders[type],
        mediaType = settings.mediaTypes[type];
    playlistLoader.on('loadedmetadata', function () {
      var media = playlistLoader.media();
      segmentLoader.playlist(media, requestOptions);
      // unlike AUDIO, the subtitle segment loader also needs the active
      // text track
      segmentLoader.track(mediaType.activeTrack()); // if the video is already playing, or if this isn't a live video and preload
      // permits, start downloading segments

      if (!tech.paused() || media.endList && tech.preload() !== 'none') {
        segmentLoader.load();
      }
    });
    playlistLoader.on('loadedplaylist', function () {
      segmentLoader.playlist(playlistLoader.media(), requestOptions); // If the player isn't paused, ensure that the segment loader is running

      if (!tech.paused()) {
        segmentLoader.load();
      }
    });
    playlistLoader.on('error', onError[type](type, settings));
  }
};
var initialize = {
  /**
   * Setup PlaylistLoaders and AudioTracks for the audio groups
   *
   * @param {string} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @function initialize.AUDIO
   */
  'AUDIO': function AUDIO(type, settings) {
    var vhs = settings.vhs,
        sourceType = settings.sourceType,
        segmentLoader = settings.segmentLoaders[type],
        requestOptions = settings.requestOptions,
        mediaGroups = settings.master.mediaGroups,
        _settings$mediaTypes$ = settings.mediaTypes[type],
        groups = _settings$mediaTypes$.groups,
        tracks = _settings$mediaTypes$.tracks,
        logger_ = _settings$mediaTypes$.logger_,
        masterPlaylistLoader = settings.masterPlaylistLoader;
    var audioOnlyMaster = isAudioOnly(masterPlaylistLoader.master); // force a default if we have none

    if (!mediaGroups[type] || Object.keys(mediaGroups[type]).length === 0) {
      mediaGroups[type] = {
        main: {
          default: {
            default: true
          }
        }
      };

      if (audioOnlyMaster) {
        mediaGroups[type].main.default.playlists = masterPlaylistLoader.master.playlists;
      }
    }

    for (var groupId in mediaGroups[type]) {
      if (!groups[groupId]) {
        groups[groupId] = [];
      }

      for (var variantLabel in mediaGroups[type][groupId]) {
        var properties = mediaGroups[type][groupId][variantLabel];
        var playlistLoader = void 0;

        // Choose a playlist loader for this variant based on source type;
        // null means the audio is carried inside the main (muxed) stream.
        if (audioOnlyMaster) {
          logger_("AUDIO group '" + groupId + "' label '" + variantLabel + "' is a master playlist");
          properties.isMasterPlaylist = true;
          playlistLoader = null; // if vhs-json was provided as the source, and the media playlist was resolved,
          // use the resolved media playlist object
        } else if (sourceType === 'vhs-json' && properties.playlists) {
          playlistLoader = new PlaylistLoader(properties.playlists[0], vhs, requestOptions);
        } else if (properties.resolvedUri) {
          playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions); // TODO: dash isn't the only type with properties.playlists
          // should we even have properties.playlists in this check.
        } else if (properties.playlists && sourceType === 'dash') {
          playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, masterPlaylistLoader);
        } else {
          // no resolvedUri means the audio is muxed with the video when using this
          // audio track
          playlistLoader = null;
        }

        properties = videojs__default["default"].mergeOptions({
          id: variantLabel,
          playlistLoader: playlistLoader
        }, properties);
        setupListeners[type](type, properties.playlistLoader, settings);
        groups[groupId].push(properties);

        if (typeof tracks[variantLabel] === 'undefined') {
          var track = new videojs__default["default"].AudioTrack({
            id: variantLabel,
            kind: audioTrackKind_(properties),
            enabled: false,
            language: properties.language,
            default: properties.default,
            label: variantLabel
          });
          tracks[variantLabel] = track;
        }
      }
    } // setup single error event handler for the segment loader


    segmentLoader.on('error', onError[type](type, settings));
  },

  /**
   * Setup PlaylistLoaders and TextTracks for the subtitle groups
   *
   * @param {string} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @function initialize.SUBTITLES
   */
  'SUBTITLES': function SUBTITLES(type, settings) {
    var tech = settings.tech,
        vhs = settings.vhs,
        sourceType = settings.sourceType,
        segmentLoader = settings.segmentLoaders[type],
        requestOptions = settings.requestOptions,
        mediaGroups = settings.master.mediaGroups,
        _settings$mediaTypes$2 = settings.mediaTypes[type],
        groups = _settings$mediaTypes$2.groups,
        tracks = _settings$mediaTypes$2.tracks,
        masterPlaylistLoader = settings.masterPlaylistLoader;

    for (var groupId in mediaGroups[type]) {
      if (!groups[groupId]) {
        groups[groupId] = [];
      }

      for (var variantLabel in mediaGroups[type][groupId]) {
        if (mediaGroups[type][groupId][variantLabel].forced) {
          // Subtitle playlists with the forced attribute are not selectable in Safari.
          // According to Apple's HLS Authoring Specification:
          //   If content has forced subtitles and regular subtitles in a given language,
          //   the regular subtitles track in that language MUST contain both the forced
          //   subtitles and the regular subtitles for that language.
          // Because of this requirement and that Safari does not add forced subtitles,
          // forced subtitles are skipped here to maintain consistent experience across
          // all platforms
          continue;
        }

        var properties = mediaGroups[type][groupId][variantLabel];
        var playlistLoader = void 0;

        if (sourceType === 'hls') {
          playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions);
        } else if (sourceType === 'dash') {
          var playlists = properties.playlists.filter(function (p) {
            return p.excludeUntil !== Infinity;
          });

          if (!playlists.length) {
            return;
          }

          // NOTE(review): the filtered `playlists` is only used for the
          // length check above; the loader is still built from
          // properties.playlists[0] — confirm this is intentional.
          playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, masterPlaylistLoader);
        } else if (sourceType === 'vhs-json') {
          playlistLoader = new PlaylistLoader( // if the vhs-json object included the media playlist, use the media playlist
          // as provided, otherwise use the resolved URI to load the playlist
          properties.playlists ? properties.playlists[0] : properties.resolvedUri, vhs, requestOptions);
        }

        properties = videojs__default["default"].mergeOptions({
          id: variantLabel,
          playlistLoader: playlistLoader
        }, properties);
        setupListeners[type](type, properties.playlistLoader, settings);
        groups[groupId].push(properties);

        if (typeof tracks[variantLabel] === 'undefined') {
          var track = tech.addRemoteTextTrack({
            id: variantLabel,
            kind: 'subtitles',
            default: properties.default && properties.autoselect,
            language: properties.language,
            label: variantLabel
          }, false).track;
          tracks[variantLabel] = track;
        }
      }
    } // setup single error event handler for the segment loader


    segmentLoader.on('error', onError[type](type, settings));
  },

  /**
   * Setup TextTracks for the closed-caption groups
   *
   * @param {String} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @function initialize['CLOSED-CAPTIONS']
   */
  'CLOSED-CAPTIONS': function CLOSEDCAPTIONS(type, settings) {
    var tech = settings.tech,
        mediaGroups = settings.master.mediaGroups,
        _settings$mediaTypes$3 = settings.mediaTypes[type],
        groups = _settings$mediaTypes$3.groups,
        tracks = _settings$mediaTypes$3.tracks;

    for (var groupId in mediaGroups[type]) {
      if (!groups[groupId]) {
        groups[groupId] = [];
      }

      for (var variantLabel in mediaGroups[type][groupId]) {
        var properties = mediaGroups[type][groupId][variantLabel]; // Look for either 608 (CCn) or 708 (SERVICEn) caption services

        if (!/^(?:CC|SERVICE)/.test(properties.instreamId)) {
          continue;
        }

        // allow player-level captionServices options to override label,
        // language, and default for a given service id
        var captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};
        var newProps = {
          label: variantLabel,
          language: properties.language,
          instreamId: properties.instreamId,
          default: properties.default && properties.autoselect
        };

        if (captionServices[newProps.instreamId]) {
          newProps = videojs__default["default"].mergeOptions(newProps, captionServices[newProps.instreamId]);
        }

        if (newProps.default === undefined) {
          delete newProps.default;
        } // No PlaylistLoader is required for Closed-Captions because the captions are
        // embedded within the video stream


        groups[groupId].push(videojs__default["default"].mergeOptions({
          id: variantLabel
        }, properties));

        if (typeof tracks[variantLabel] === 'undefined') {
          var track = tech.addRemoteTextTrack({
            id: newProps.instreamId,
            kind: 'captions',
            default: newProps.default,
            language: newProps.language,
            label: newProps.label
          }, false).track;
          tracks[variantLabel] = track;
        }
      }
    }
  }
};
27816
/**
 * Recursively check whether `media` matches any entry in `list`, either
 * directly (via playlistMatch) or within an entry's nested playlists.
 *
 * @param {Array} list group property list, possibly with nested playlists
 * @param {Object} media playlist to look for
 * @return {boolean} true when a match is found
 */
var groupMatch = function groupMatch(list, media) {
  for (var i = 0; i < list.length; i++) {
    var entry = list[i];

    if (playlistMatch(media, entry) || entry.playlists && groupMatch(entry.playlists, media)) {
      return true;
    }
  }

  return false;
};
27830 /**
27831 * Returns a function used to get the active group of the provided type
27832 *
27833 * @param {string} type
27834 * MediaGroup type
27835 * @param {Object} settings
27836 * Object containing required information for media groups
27837 * @return {Function}
27838 * Function that returns the active media group for the provided type. Takes an
27839 * optional parameter {TextTrack} track. If no track is provided, a list of all
27840 * variants in the group, otherwise the variant corresponding to the provided
27841 * track is returned.
27842 * @function activeGroup
27843 */
27844
27845
var activeGroup = function activeGroup(type, settings) {
  return function (track) {
    var masterPlaylistLoader = settings.masterPlaylistLoader,
        groups = settings.mediaTypes[type].groups;
    var media = masterPlaylistLoader.media();

    if (!media) {
      // no media selected yet, so there can be no active group
      return null;
    }

    var variants = null; // set to variants to main media active group

    if (media.attributes[type]) {
      variants = groups[media.attributes[type]];
    }

    var groupKeys = Object.keys(groups);

    if (!variants) {
      // find the masterPlaylistLoader media
      // that is in a media group if we are dealing
      // with audio only
      if (type === 'AUDIO' && groupKeys.length > 1 && isAudioOnly(settings.master)) {
        for (var i = 0; i < groupKeys.length; i++) {
          var groupPropertyList = groups[groupKeys[i]];

          if (groupMatch(groupPropertyList, media)) {
            variants = groupPropertyList;
            break;
          }
        } // use the main group if it exists

      } else if (groups.main) {
        variants = groups.main; // only one group, use that one
      } else if (groupKeys.length === 1) {
        variants = groups[groupKeys[0]];
      }
    }

    if (typeof track === 'undefined') {
      // no track argument: return the whole variant list for the group
      return variants;
    }

    if (track === null || !variants) {
      // An active track was specified so a corresponding group is expected. track === null
      // means no track is currently active so there is no corresponding group
      return null;
    }

    // with a track, return only the variant matching that track's id
    return variants.filter(function (props) {
      return props.id === track.id;
    })[0] || null;
  };
};
var activeTrack = {
  /**
   * Returns a function that reports the currently enabled audio track for
   * the given media type, or null when none is enabled.
   *
   * @param {string} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @return {Function} active-track getter
   * @function activeTrack.AUDIO
   */
  AUDIO: function AUDIO(type, settings) {
    return function () {
      var tracks = settings.mediaTypes[type].tracks;
      var ids = Object.keys(tracks);

      for (var i = 0; i < ids.length; i++) {
        var candidate = tracks[ids[i]];

        if (candidate.enabled) {
          return candidate;
        }
      }

      return null;
    };
  },

  /**
   * Returns a function that reports the currently showing or hidden
   * subtitle track for the given media type, or null when none is active.
   *
   * @param {string} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @return {Function} active-track getter
   * @function activeTrack.SUBTITLES
   */
  SUBTITLES: function SUBTITLES(type, settings) {
    return function () {
      var tracks = settings.mediaTypes[type].tracks;
      var ids = Object.keys(tracks);

      for (var i = 0; i < ids.length; i++) {
        var candidate = tracks[ids[i]];

        if (candidate.mode === 'showing' || candidate.mode === 'hidden') {
          return candidate;
        }
      }

      return null;
    };
  }
};
/**
 * Returns a function that resolves the active group for the given media
 * type via its currently active track; null when no track is active.
 *
 * @param {string} type
 *        MediaGroup type
 * @param {Object} _ref destructured settings; only mediaTypes is used
 * @return {Function} active-group getter
 */
var getActiveGroup = function getActiveGroup(type, _ref) {
  var mediaTypes = _ref.mediaTypes;
  return function () {
    var track = mediaTypes[type].activeTrack();

    return track ? mediaTypes[type].activeGroup(track) : null;
  };
};
27965 /**
27966 * Setup PlaylistLoaders and Tracks for media groups (Audio, Subtitles,
27967 * Closed-Captions) specified in the master manifest.
27968 *
27969 * @param {Object} settings
27970 * Object containing required information for setting up the media groups
27971 * @param {Tech} settings.tech
27972 * The tech of the player
27973 * @param {Object} settings.requestOptions
27974 * XHR request options used by the segment loaders
27975 * @param {PlaylistLoader} settings.masterPlaylistLoader
27976 * PlaylistLoader for the master source
27977 * @param {VhsHandler} settings.vhs
27978 * VHS SourceHandler
27979 * @param {Object} settings.master
27980 * The parsed master manifest
27981 * @param {Object} settings.mediaTypes
27982 * Object to store the loaders, tracks, and utility methods for each media type
27983 * @param {Function} settings.blacklistCurrentPlaylist
27984 * Blacklists the current rendition and forces a rendition switch.
27985 * @function setupMediaGroups
27986 */
27987
  var setupMediaGroups = function setupMediaGroups(settings) {
    // run the per-type initializer for every media group type
    // (NOTE(review): `initialize` is defined earlier in this file, outside
    // this chunk)
    ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
      initialize[type](type, settings);
    });
    var mediaTypes = settings.mediaTypes,
        masterPlaylistLoader = settings.masterPlaylistLoader,
        tech = settings.tech,
        vhs = settings.vhs,
        _settings$segmentLoad3 = settings.segmentLoaders,
        audioSegmentLoader = _settings$segmentLoad3['AUDIO'],
        mainSegmentLoader = _settings$segmentLoad3.main; // setup active group and track getters and change event handlers

    ['AUDIO', 'SUBTITLES'].forEach(function (type) {
      mediaTypes[type].activeGroup = activeGroup(type, settings);
      mediaTypes[type].activeTrack = activeTrack[type](type, settings);
      mediaTypes[type].onGroupChanged = onGroupChanged(type, settings);
      mediaTypes[type].onGroupChanging = onGroupChanging(type, settings);
      mediaTypes[type].onTrackChanged = onTrackChanged(type, settings);
      mediaTypes[type].getActiveGroup = getActiveGroup(type, settings);
    }); // DO NOT enable the default subtitle or caption track.
    // DO enable the default audio track

    var audioGroup = mediaTypes.AUDIO.activeGroup();

    if (audioGroup) {
      // pick the group entry flagged `default`, falling back to the first;
      // despite the name, `groupId` is used as a key into the tracks map
      var groupId = (audioGroup.filter(function (group) {
        return group.default;
      })[0] || audioGroup[0]).id;
      mediaTypes.AUDIO.tracks[groupId].enabled = true;
      mediaTypes.AUDIO.onGroupChanged();
      mediaTypes.AUDIO.onTrackChanged();
      var activeAudioGroup = mediaTypes.AUDIO.getActiveGroup(); // a similar check for handling setAudio on each loader is run again each time the
      // track is changed, but needs to be handled here since the track may not be considered
      // changed on the first call to onTrackChanged

      if (!activeAudioGroup.playlistLoader) {
        // either audio is muxed with video or the stream is audio only
        mainSegmentLoader.setAudio(true);
      } else {
        // audio is demuxed
        mainSegmentLoader.setAudio(false);
        audioSegmentLoader.setAudio(true);
      }
    }

    // keep group/track state in sync with master playlist media changes
    masterPlaylistLoader.on('mediachange', function () {
      ['AUDIO', 'SUBTITLES'].forEach(function (type) {
        return mediaTypes[type].onGroupChanged();
      });
    });
    masterPlaylistLoader.on('mediachanging', function () {
      ['AUDIO', 'SUBTITLES'].forEach(function (type) {
        return mediaTypes[type].onGroupChanging();
      });
    }); // custom audio track change event handler for usage event

    var onAudioTrackChanged = function onAudioTrackChanged() {
      mediaTypes.AUDIO.onTrackChanged();
      tech.trigger({
        type: 'usage',
        name: 'vhs-audio-change'
      });
      tech.trigger({
        type: 'usage',
        name: 'hls-audio-change'
      });
    };

    tech.audioTracks().addEventListener('change', onAudioTrackChanged);
    tech.remoteTextTracks().addEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
    // unhook both listeners when the source handler is disposed
    vhs.on('dispose', function () {
      tech.audioTracks().removeEventListener('change', onAudioTrackChanged);
      tech.remoteTextTracks().removeEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
    }); // clear existing audio tracks and add the ones we just created

    tech.clearTracks('audio');

    for (var id in mediaTypes.AUDIO.tracks) {
      tech.audioTracks().addTrack(mediaTypes.AUDIO.tracks[id]);
    }
  };
28069 /**
28070 * Creates skeleton object used to store the loaders, tracks, and utility methods for each
28071 * media type
28072 *
28073 * @return {Object}
28074 * Object to store the loaders, tracks, and utility methods for each media type
28075 * @function createMediaTypes
28076 */
28077
28078 var createMediaTypes = function createMediaTypes() {
28079 var mediaTypes = {};
28080 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
28081 mediaTypes[type] = {
28082 groups: {},
28083 tracks: {},
28084 activePlaylistLoader: null,
28085 activeGroup: noop,
28086 activeTrack: noop,
28087 getActiveGroup: noop,
28088 onGroupChanged: noop,
28089 onTrackChanged: noop,
28090 lastTrack_: null,
28091 logger_: logger("MediaGroups[" + type + "]")
28092 };
28093 });
28094 return mediaTypes;
28095 };
28096
  // Two minutes, expressed in seconds. NOTE(review): consumers of this
  // constant are outside this chunk — presumably the early-abort blacklist
  // logic its name describes; verify at the usage sites.
  var ABORT_EARLY_BLACKLIST_SECONDS = 60 * 2;

  // Module-level handle to the externally supplied Vhs implementation;
  // assigned from options.externVhs in the MasterPlaylistController
  // constructor below.
  var Vhs$1; // SegmentLoader stats that need to have each loader's
  // (main + audio) values summed to calculate the final value; each name
  // gets a bound sumLoaderStat getter on the controller (see constructor).

  var loaderStats = ['mediaRequests', 'mediaRequestsAborted', 'mediaRequestsTimedout', 'mediaRequestsErrored', 'mediaTransferDuration', 'mediaBytesTransferred', 'mediaAppends'];
28102
28103 var sumLoaderStat = function sumLoaderStat(stat) {
28104 return this.audioSegmentLoader_[stat] + this.mainSegmentLoader_[stat];
28105 };
28106
28107 var shouldSwitchToMedia = function shouldSwitchToMedia(_ref) {
28108 var currentPlaylist = _ref.currentPlaylist,
28109 buffered = _ref.buffered,
28110 currentTime = _ref.currentTime,
28111 nextPlaylist = _ref.nextPlaylist,
28112 bufferLowWaterLine = _ref.bufferLowWaterLine,
28113 bufferHighWaterLine = _ref.bufferHighWaterLine,
28114 duration = _ref.duration,
28115 experimentalBufferBasedABR = _ref.experimentalBufferBasedABR,
28116 log = _ref.log;
28117
28118 // we have no other playlist to switch to
28119 if (!nextPlaylist) {
28120 videojs__default["default"].log.warn('We received no playlist to switch to. Please check your stream.');
28121 return false;
28122 }
28123
28124 var sharedLogLine = "allowing switch " + (currentPlaylist && currentPlaylist.id || 'null') + " -> " + nextPlaylist.id;
28125
28126 if (!currentPlaylist) {
28127 log(sharedLogLine + " as current playlist is not set");
28128 return true;
28129 } // no need to switch if playlist is the same
28130
28131
28132 if (nextPlaylist.id === currentPlaylist.id) {
28133 return false;
28134 } // determine if current time is in a buffered range.
28135
28136
28137 var isBuffered = Boolean(findRange(buffered, currentTime).length); // If the playlist is live, then we want to not take low water line into account.
28138 // This is because in LIVE, the player plays 3 segments from the end of the
28139 // playlist, and if `BUFFER_LOW_WATER_LINE` is greater than the duration availble
28140 // in those segments, a viewer will never experience a rendition upswitch.
28141
28142 if (!currentPlaylist.endList) {
28143 // For LLHLS live streams, don't switch renditions before playback has started, as it almost
28144 // doubles the time to first playback.
28145 if (!isBuffered && typeof currentPlaylist.partTargetDuration === 'number') {
28146 log("not " + sharedLogLine + " as current playlist is live llhls, but currentTime isn't in buffered.");
28147 return false;
28148 }
28149
28150 log(sharedLogLine + " as current playlist is live");
28151 return true;
28152 }
28153
28154 var forwardBuffer = timeAheadOf(buffered, currentTime);
28155 var maxBufferLowWaterLine = experimentalBufferBasedABR ? Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE : Config.MAX_BUFFER_LOW_WATER_LINE; // For the same reason as LIVE, we ignore the low water line when the VOD
28156 // duration is below the max potential low water line
28157
28158 if (duration < maxBufferLowWaterLine) {
28159 log(sharedLogLine + " as duration < max low water line (" + duration + " < " + maxBufferLowWaterLine + ")");
28160 return true;
28161 }
28162
28163 var nextBandwidth = nextPlaylist.attributes.BANDWIDTH;
28164 var currBandwidth = currentPlaylist.attributes.BANDWIDTH; // when switching down, if our buffer is lower than the high water line,
28165 // we can switch down
28166
28167 if (nextBandwidth < currBandwidth && (!experimentalBufferBasedABR || forwardBuffer < bufferHighWaterLine)) {
28168 var logLine = sharedLogLine + " as next bandwidth < current bandwidth (" + nextBandwidth + " < " + currBandwidth + ")";
28169
28170 if (experimentalBufferBasedABR) {
28171 logLine += " and forwardBuffer < bufferHighWaterLine (" + forwardBuffer + " < " + bufferHighWaterLine + ")";
28172 }
28173
28174 log(logLine);
28175 return true;
28176 } // and if our buffer is higher than the low water line,
28177 // we can switch up
28178
28179
28180 if ((!experimentalBufferBasedABR || nextBandwidth > currBandwidth) && forwardBuffer >= bufferLowWaterLine) {
28181 var _logLine = sharedLogLine + " as forwardBuffer >= bufferLowWaterLine (" + forwardBuffer + " >= " + bufferLowWaterLine + ")";
28182
28183 if (experimentalBufferBasedABR) {
28184 _logLine += " and next bandwidth > current bandwidth (" + nextBandwidth + " > " + currBandwidth + ")";
28185 }
28186
28187 log(_logLine);
28188 return true;
28189 }
28190
28191 log("not " + sharedLogLine + " as no switching criteria met");
28192 return false;
28193 };
28194 /**
28195 * the master playlist controller controller all interactons
28196 * between playlists and segmentloaders. At this time this mainly
28197 * involves a master playlist and a series of audio playlists
28198 * if they are available
28199 *
28200 * @class MasterPlaylistController
28201 * @extends videojs.EventTarget
28202 */
28203
28204
28205 var MasterPlaylistController = /*#__PURE__*/function (_videojs$EventTarget) {
28206 inheritsLoose(MasterPlaylistController, _videojs$EventTarget);
28207
    /**
     * Build a MasterPlaylistController: wires up the MediaSource, the
     * master playlist loader, the three segment loaders (main/audio/vtt),
     * ABR timers and first-frame timing stats. Throws when no src is given.
     *
     * @param {Object} options controller options (src, tech, externVhs,
     *        bandwidth, sourceType, feature flags, ...)
     */
    function MasterPlaylistController(options) {
      var _this;

      _this = _videojs$EventTarget.call(this) || this;
      var src = options.src,
          handleManifestRedirects = options.handleManifestRedirects,
          withCredentials = options.withCredentials,
          tech = options.tech,
          bandwidth = options.bandwidth,
          externVhs = options.externVhs,
          useCueTags = options.useCueTags,
          blacklistDuration = options.blacklistDuration,
          enableLowInitialPlaylist = options.enableLowInitialPlaylist,
          sourceType = options.sourceType,
          cacheEncryptionKeys = options.cacheEncryptionKeys,
          experimentalBufferBasedABR = options.experimentalBufferBasedABR,
          experimentalLeastPixelDiffSelector = options.experimentalLeastPixelDiffSelector,
          captionServices = options.captionServices;

      if (!src) {
        throw new Error('A non-empty playlist URL or JSON manifest string is required');
      }

      // null/undefined means "retry forever"
      var maxPlaylistRetries = options.maxPlaylistRetries;

      if (maxPlaylistRetries === null || typeof maxPlaylistRetries === 'undefined') {
        maxPlaylistRetries = Infinity;
      }

      // stash the externally supplied Vhs implementation at module scope
      Vhs$1 = externVhs;
      _this.experimentalBufferBasedABR = Boolean(experimentalBufferBasedABR);
      _this.experimentalLeastPixelDiffSelector = Boolean(experimentalLeastPixelDiffSelector);
      _this.withCredentials = withCredentials;
      _this.tech_ = tech;
      _this.vhs_ = tech.vhs;
      _this.sourceType_ = sourceType;
      _this.useCueTags_ = useCueTags;
      _this.blacklistDuration = blacklistDuration;
      _this.maxPlaylistRetries = maxPlaylistRetries;
      _this.enableLowInitialPlaylist = enableLowInitialPlaylist;

      // metadata text track that carries ad cues parsed from the playlist
      if (_this.useCueTags_) {
        _this.cueTagsTrack_ = _this.tech_.addTextTrack('metadata', 'ad-cues');
        _this.cueTagsTrack_.inBandMetadataTrackDispatchType = '';
      }

      // shared XHR options handed to playlist and segment requests;
      // timeout is filled in once a media playlist is known
      _this.requestOptions_ = {
        withCredentials: withCredentials,
        handleManifestRedirects: handleManifestRedirects,
        maxPlaylistRetries: maxPlaylistRetries,
        timeout: null
      };

      _this.on('error', _this.pauseLoading);

      _this.mediaTypes_ = createMediaTypes();
      _this.mediaSource = new window.MediaSource();
      // pre-bind MediaSource handlers so the same references can be removed
      // on dispose
      _this.handleDurationChange_ = _this.handleDurationChange_.bind(assertThisInitialized(_this));
      _this.handleSourceOpen_ = _this.handleSourceOpen_.bind(assertThisInitialized(_this));
      _this.handleSourceEnded_ = _this.handleSourceEnded_.bind(assertThisInitialized(_this));

      _this.mediaSource.addEventListener('durationchange', _this.handleDurationChange_); // load the media source into the player


      _this.mediaSource.addEventListener('sourceopen', _this.handleSourceOpen_);

      _this.mediaSource.addEventListener('sourceended', _this.handleSourceEnded_); // we don't have to handle sourceclose since dispose will handle termination of
      // everything, and the MediaSource should not be detached without a proper disposal


      _this.seekable_ = videojs__default["default"].createTimeRanges();
      _this.hasPlayed_ = false;
      _this.syncController_ = new SyncController(options);
      _this.segmentMetadataTrack_ = tech.addRemoteTextTrack({
        kind: 'metadata',
        label: 'segment-metadata'
      }, false).track;
      _this.decrypter_ = new Decrypter();
      _this.sourceUpdater_ = new SourceUpdater(_this.mediaSource);
      _this.inbandTextTracks_ = {};
      _this.timelineChangeController_ = new TimelineChangeController();
      // settings shared by all three segment loaders; playback-state values
      // are exposed as getter functions so loaders always see fresh values
      var segmentLoaderSettings = {
        vhs: _this.vhs_,
        parse708captions: options.parse708captions,
        useDtsForTimestampOffset: options.useDtsForTimestampOffset,
        captionServices: captionServices,
        mediaSource: _this.mediaSource,
        currentTime: _this.tech_.currentTime.bind(_this.tech_),
        seekable: function seekable() {
          return _this.seekable();
        },
        seeking: function seeking() {
          return _this.tech_.seeking();
        },
        duration: function duration() {
          return _this.duration();
        },
        hasPlayed: function hasPlayed() {
          return _this.hasPlayed_;
        },
        goalBufferLength: function goalBufferLength() {
          return _this.goalBufferLength();
        },
        bandwidth: bandwidth,
        syncController: _this.syncController_,
        decrypter: _this.decrypter_,
        sourceType: _this.sourceType_,
        inbandTextTracks: _this.inbandTextTracks_,
        cacheEncryptionKeys: cacheEncryptionKeys,
        sourceUpdater: _this.sourceUpdater_,
        timelineChangeController: _this.timelineChangeController_,
        experimentalExactManifestTimings: options.experimentalExactManifestTimings
      }; // The source type check not only determines whether a special DASH playlist loader
      // should be used, but also covers the case where the provided src is a vhs-json
      // manifest object (instead of a URL). In the case of vhs-json, the default
      // PlaylistLoader should be used.

      _this.masterPlaylistLoader_ = _this.sourceType_ === 'dash' ? new DashPlaylistLoader(src, _this.vhs_, _this.requestOptions_) : new PlaylistLoader(src, _this.vhs_, _this.requestOptions_);

      _this.setupMasterPlaylistLoaderListeners_(); // setup segment loaders
      // combined audio/video or just video when alternate audio track is selected


      _this.mainSegmentLoader_ = new SegmentLoader(videojs__default["default"].mergeOptions(segmentLoaderSettings, {
        segmentMetadataTrack: _this.segmentMetadataTrack_,
        loaderType: 'main'
      }), options); // alternate audio track

      _this.audioSegmentLoader_ = new SegmentLoader(videojs__default["default"].mergeOptions(segmentLoaderSettings, {
        loaderType: 'audio'
      }), options);
      _this.subtitleSegmentLoader_ = new VTTSegmentLoader(videojs__default["default"].mergeOptions(segmentLoaderSettings, {
        loaderType: 'vtt',
        featuresNativeTextTracks: _this.tech_.featuresNativeTextTracks
      }), options);

      _this.setupSegmentLoaderListeners_();

      // buffer-based ABR runs on a polling timer instead of reacting to
      // bandwidthupdate events; the timer pauses while the tech is paused
      if (_this.experimentalBufferBasedABR) {
        _this.masterPlaylistLoader_.one('loadedplaylist', function () {
          return _this.startABRTimer_();
        });

        _this.tech_.on('pause', function () {
          return _this.stopABRTimer_();
        });

        _this.tech_.on('play', function () {
          return _this.startABRTimer_();
        });
      } // Create SegmentLoader stat-getters
      // mediaRequests_
      // mediaRequestsAborted_
      // mediaRequestsTimedout_
      // mediaRequestsErrored_
      // mediaTransferDuration_
      // mediaBytesTransferred_
      // mediaAppends_


      loaderStats.forEach(function (stat) {
        _this[stat + '_'] = sumLoaderStat.bind(assertThisInitialized(_this), stat);
      });
      _this.logger_ = logger('MPC');
      _this.triggeredFmp4Usage = false;

      // preload="none": defer the first manifest request until playback
      // actually starts; otherwise load immediately
      if (_this.tech_.preload() === 'none') {
        _this.loadOnPlay_ = function () {
          _this.loadOnPlay_ = null;

          _this.masterPlaylistLoader_.load();
        };

        _this.tech_.one('play', _this.loadOnPlay_);
      } else {
        _this.masterPlaylistLoader_.load();
      }

      // -1 marks "not yet measured" for the loadeddata timing stats below
      _this.timeToLoadedData__ = -1;
      _this.mainAppendsToLoadedData__ = -1;
      _this.audioAppendsToLoadedData__ = -1;
      var event = _this.tech_.preload() === 'none' ? 'play' : 'loadstart'; // start the first frame timer on loadstart or play (for preload none)

      _this.tech_.one(event, function () {
        var timeToLoadedDataStart = Date.now();

        _this.tech_.one('loadeddata', function () {
          _this.timeToLoadedData__ = Date.now() - timeToLoadedDataStart;
          _this.mainAppendsToLoadedData__ = _this.mainSegmentLoader_.mediaAppends;
          _this.audioAppendsToLoadedData__ = _this.audioSegmentLoader_.mediaAppends;
        });
      });

      return _this;
    }
28403
    var _proto = MasterPlaylistController.prototype;

    /**
     * @return {number} main-loader segment appends that had occurred when
     *         'loadeddata' fired, or -1 before that point (see constructor)
     */
    _proto.mainAppendsToLoadedData_ = function mainAppendsToLoadedData_() {
      return this.mainAppendsToLoadedData__;
    };

    /**
     * @return {number} audio-loader segment appends that had occurred when
     *         'loadeddata' fired, or -1 before that point (see constructor)
     */
    _proto.audioAppendsToLoadedData_ = function audioAppendsToLoadedData_() {
      return this.audioAppendsToLoadedData__;
    };
28413
28414 _proto.appendsToLoadedData_ = function appendsToLoadedData_() {
28415 var main = this.mainAppendsToLoadedData_();
28416 var audio = this.audioAppendsToLoadedData_();
28417
28418 if (main === -1 || audio === -1) {
28419 return -1;
28420 }
28421
28422 return main + audio;
28423 };
28424
    /**
     * @return {number} milliseconds from the first 'loadstart' (or 'play'
     *         for preload="none") to 'loadeddata', or -1 before measured
     */
    _proto.timeToLoadedData_ = function timeToLoadedData_() {
      return this.timeToLoadedData__;
    }
28428 /**
28429 * Run selectPlaylist and switch to the new playlist if we should
28430 *
28431 * @private
28432 *
28433 */
28434 ;
28435
28436 _proto.checkABR_ = function checkABR_() {
28437 var nextPlaylist = this.selectPlaylist();
28438
28439 if (nextPlaylist && this.shouldSwitchToMedia_(nextPlaylist)) {
28440 this.switchMedia_(nextPlaylist, 'abr');
28441 }
28442 };
28443
28444 _proto.switchMedia_ = function switchMedia_(playlist, cause, delay) {
28445 var oldMedia = this.media();
28446 var oldId = oldMedia && (oldMedia.id || oldMedia.uri);
28447 var newId = playlist.id || playlist.uri;
28448
28449 if (oldId && oldId !== newId) {
28450 this.logger_("switch media " + oldId + " -> " + newId + " from " + cause);
28451 this.tech_.trigger({
28452 type: 'usage',
28453 name: "vhs-rendition-change-" + cause
28454 });
28455 }
28456
28457 this.masterPlaylistLoader_.media(playlist, delay);
28458 }
28459 /**
28460 * Start a timer that periodically calls checkABR_
28461 *
28462 * @private
28463 */
28464 ;
28465
28466 _proto.startABRTimer_ = function startABRTimer_() {
28467 var _this2 = this;
28468
28469 this.stopABRTimer_();
28470 this.abrTimer_ = window.setInterval(function () {
28471 return _this2.checkABR_();
28472 }, 250);
28473 }
28474 /**
28475 * Stop the timer that periodically calls checkABR_
28476 *
28477 * @private
28478 */
28479 ;
28480
28481 _proto.stopABRTimer_ = function stopABRTimer_() {
28482 // if we're scrubbing, we don't need to pause.
28483 // This getter will be added to Video.js in version 7.11.
28484 if (this.tech_.scrubbing && this.tech_.scrubbing()) {
28485 return;
28486 }
28487
28488 window.clearInterval(this.abrTimer_);
28489 this.abrTimer_ = null;
28490 }
28491 /**
28492 * Get a list of playlists for the currently selected audio playlist
28493 *
28494 * @return {Array} the array of audio playlists
28495 */
28496 ;
28497
28498 _proto.getAudioTrackPlaylists_ = function getAudioTrackPlaylists_() {
28499 var master = this.master();
28500 var defaultPlaylists = master && master.playlists || []; // if we don't have any audio groups then we can only
28501 // assume that the audio tracks are contained in masters
28502 // playlist array, use that or an empty array.
28503
28504 if (!master || !master.mediaGroups || !master.mediaGroups.AUDIO) {
28505 return defaultPlaylists;
28506 }
28507
28508 var AUDIO = master.mediaGroups.AUDIO;
28509 var groupKeys = Object.keys(AUDIO);
28510 var track; // get the current active track
28511
28512 if (Object.keys(this.mediaTypes_.AUDIO.groups).length) {
28513 track = this.mediaTypes_.AUDIO.activeTrack(); // or get the default track from master if mediaTypes_ isn't setup yet
28514 } else {
28515 // default group is `main` or just the first group.
28516 var defaultGroup = AUDIO.main || groupKeys.length && AUDIO[groupKeys[0]];
28517
28518 for (var label in defaultGroup) {
28519 if (defaultGroup[label].default) {
28520 track = {
28521 label: label
28522 };
28523 break;
28524 }
28525 }
28526 } // no active track no playlists.
28527
28528
28529 if (!track) {
28530 return defaultPlaylists;
28531 }
28532
28533 var playlists = []; // get all of the playlists that are possible for the
28534 // active track.
28535
28536 for (var group in AUDIO) {
28537 if (AUDIO[group][track.label]) {
28538 var properties = AUDIO[group][track.label];
28539
28540 if (properties.playlists && properties.playlists.length) {
28541 playlists.push.apply(playlists, properties.playlists);
28542 } else if (properties.uri) {
28543 playlists.push(properties);
28544 } else if (master.playlists.length) {
28545 // if an audio group does not have a uri
28546 // see if we have main playlists that use it as a group.
28547 // if we do then add those to the playlists list.
28548 for (var i = 0; i < master.playlists.length; i++) {
28549 var playlist = master.playlists[i];
28550
28551 if (playlist.attributes && playlist.attributes.AUDIO && playlist.attributes.AUDIO === group) {
28552 playlists.push(playlist);
28553 }
28554 }
28555 }
28556 }
28557 }
28558
28559 if (!playlists.length) {
28560 return defaultPlaylists;
28561 }
28562
28563 return playlists;
28564 }
28565 /**
28566 * Register event handlers on the master playlist loader. A helper
28567 * function for construction time.
28568 *
28569 * @private
28570 */
28571 ;
28572
    _proto.setupMasterPlaylistLoaderListeners_ = function setupMasterPlaylistLoaderListeners_() {
      var _this3 = this;

      // first media playlist is known: size request timeouts, optionally
      // start the main loader, set up media groups and fire
      // 'selectedinitialmedia'
      this.masterPlaylistLoader_.on('loadedmetadata', function () {
        var media = _this3.masterPlaylistLoader_.media();

        // 1.5x target duration, in milliseconds
        var requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to
        // timeout the request.

        if (isLowestEnabledRendition(_this3.masterPlaylistLoader_.master, _this3.masterPlaylistLoader_.media())) {
          _this3.requestOptions_.timeout = 0;
        } else {
          _this3.requestOptions_.timeout = requestTimeout;
        } // if this isn't a live video and preload permits, start
        // downloading segments


        if (media.endList && _this3.tech_.preload() !== 'none') {
          _this3.mainSegmentLoader_.playlist(media, _this3.requestOptions_);

          _this3.mainSegmentLoader_.load();
        }

        setupMediaGroups({
          sourceType: _this3.sourceType_,
          segmentLoaders: {
            AUDIO: _this3.audioSegmentLoader_,
            SUBTITLES: _this3.subtitleSegmentLoader_,
            main: _this3.mainSegmentLoader_
          },
          tech: _this3.tech_,
          requestOptions: _this3.requestOptions_,
          masterPlaylistLoader: _this3.masterPlaylistLoader_,
          vhs: _this3.vhs_,
          master: _this3.master(),
          mediaTypes: _this3.mediaTypes_,
          blacklistCurrentPlaylist: _this3.blacklistCurrentPlaylist.bind(_this3)
        });

        _this3.triggerPresenceUsage_(_this3.master(), media);

        _this3.setupFirstPlay();

        if (!_this3.mediaTypes_.AUDIO.activePlaylistLoader || _this3.mediaTypes_.AUDIO.activePlaylistLoader.media()) {
          _this3.trigger('selectedinitialmedia');
        } else {
          // We must wait for the active audio playlist loader to
          // finish setting up before triggering this event so the
          // representations API and EME setup is correct
          _this3.mediaTypes_.AUDIO.activePlaylistLoader.one('loadedmetadata', function () {
            _this3.trigger('selectedinitialmedia');
          });
        }
      });
      // a (master or media) playlist arrived or refreshed: pick the initial
      // media if none is selected yet, then forward the update
      this.masterPlaylistLoader_.on('loadedplaylist', function () {
        // the deferred preload="none" load has happened; drop its handler
        if (_this3.loadOnPlay_) {
          _this3.tech_.off('play', _this3.loadOnPlay_);
        }

        var updatedPlaylist = _this3.masterPlaylistLoader_.media();

        if (!updatedPlaylist) {
          // exclude any variants that are not supported by the browser before selecting
          // an initial media as the playlist selectors do not consider browser support
          _this3.excludeUnsupportedVariants_();

          var selectedMedia;

          if (_this3.enableLowInitialPlaylist) {
            selectedMedia = _this3.selectInitialPlaylist();
          }

          if (!selectedMedia) {
            selectedMedia = _this3.selectPlaylist();
          }

          if (!selectedMedia || !_this3.shouldSwitchToMedia_(selectedMedia)) {
            return;
          }

          _this3.initialMedia_ = selectedMedia;

          _this3.switchMedia_(_this3.initialMedia_, 'initial'); // Under the standard case where a source URL is provided, loadedplaylist will
          // fire again since the playlist will be requested. In the case of vhs-json
          // (where the manifest object is provided as the source), when the media
          // playlist's `segments` list is already available, a media playlist won't be
          // requested, and loadedplaylist won't fire again, so the playlist handler must be
          // called on its own here.


          var haveJsonSource = _this3.sourceType_ === 'vhs-json' && _this3.initialMedia_.segments;

          if (!haveJsonSource) {
            return;
          }

          updatedPlaylist = _this3.initialMedia_;
        }

        _this3.handleUpdatedMediaPlaylist(updatedPlaylist);
      });
      // loader errors blacklist the current rendition and force a switch
      this.masterPlaylistLoader_.on('error', function () {
        _this3.blacklistCurrentPlaylist(_this3.masterPlaylistLoader_.error);
      });
      // stop in-flight main-loader work while a rendition change is pending
      this.masterPlaylistLoader_.on('mediachanging', function () {
        _this3.mainSegmentLoader_.abort();

        _this3.mainSegmentLoader_.pause();
      });
      this.masterPlaylistLoader_.on('mediachange', function () {
        var media = _this3.masterPlaylistLoader_.media();

        var requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to
        // timeout the request.

        if (isLowestEnabledRendition(_this3.masterPlaylistLoader_.master, _this3.masterPlaylistLoader_.media())) {
          _this3.requestOptions_.timeout = 0;
        } else {
          _this3.requestOptions_.timeout = requestTimeout;
        } // TODO: Create a new event on the PlaylistLoader that signals
        // that the segments have changed in some way and use that to
        // update the SegmentLoader instead of doing it twice here and
        // on `loadedplaylist`


        _this3.mainSegmentLoader_.playlist(media, _this3.requestOptions_);

        _this3.mainSegmentLoader_.load();

        _this3.tech_.trigger({
          type: 'mediachange',
          bubbles: true
        });
      });
      this.masterPlaylistLoader_.on('playlistunchanged', function () {
        var updatedPlaylist = _this3.masterPlaylistLoader_.media(); // ignore unchanged playlists that have already been
        // excluded for not-changing. We likely just have a really slowly updating
        // playlist.


        if (updatedPlaylist.lastExcludeReason_ === 'playlist-unchanged') {
          return;
        }

        var playlistOutdated = _this3.stuckAtPlaylistEnd_(updatedPlaylist);

        if (playlistOutdated) {
          // Playlist has stopped updating and we're stuck at its end. Try to
          // blacklist it and switch to another playlist in the hope that that
          // one is updating (and give the player a chance to re-adjust to the
          // safe live point).
          _this3.blacklistCurrentPlaylist({
            message: 'Playlist no longer updating.',
            reason: 'playlist-unchanged'
          }); // useful for monitoring QoS


          _this3.tech_.trigger('playliststuck');
        }
      });
      // forward rendition enable/disable as paired vhs-/hls- usage events
      this.masterPlaylistLoader_.on('renditiondisabled', function () {
        _this3.tech_.trigger({
          type: 'usage',
          name: 'vhs-rendition-disabled'
        });

        _this3.tech_.trigger({
          type: 'usage',
          name: 'hls-rendition-disabled'
        });
      });
      this.masterPlaylistLoader_.on('renditionenabled', function () {
        _this3.tech_.trigger({
          type: 'usage',
          name: 'vhs-rendition-enabled'
        });

        _this3.tech_.trigger({
          type: 'usage',
          name: 'hls-rendition-enabled'
        });
      });
    }
28756 /**
28757 * Given an updated media playlist (whether it was loaded for the first time, or
28758 * refreshed for live playlists), update any relevant properties and state to reflect
28759 * changes in the media that should be accounted for (e.g., cues and duration).
28760 *
28761 * @param {Object} updatedPlaylist the updated media playlist object
28762 *
28763 * @private
28764 */
28765 ;
28766
    _proto.handleUpdatedMediaPlaylist = function handleUpdatedMediaPlaylist(updatedPlaylist) {
      // refresh the ad-cues metadata track created in the constructor
      // (only present when useCueTags was requested)
      if (this.useCueTags_) {
        this.updateAdCues_(updatedPlaylist);
      } // TODO: Create a new event on the PlaylistLoader that signals
      // that the segments have changed in some way and use that to
      // update the SegmentLoader instead of doing it twice here and
      // on `mediachange`


      this.mainSegmentLoader_.playlist(updatedPlaylist, this.requestOptions_);
      // a playlist without an endList is live, so keep the duration dynamic
      this.updateDuration(!updatedPlaylist.endList); // If the player isn't paused, ensure that the segment loader is running,
      // as it is possible that it was temporarily stopped while waiting for
      // a playlist (e.g., in case the playlist errored and we re-requested it).

      if (!this.tech_.paused()) {
        this.mainSegmentLoader_.load();

        if (this.audioSegmentLoader_) {
          this.audioSegmentLoader_.load();
        }
      }
    }
28789 /**
28790 * A helper function for triggerring presence usage events once per source
28791 *
28792 * @private
28793 */
28794 ;
28795
28796 _proto.triggerPresenceUsage_ = function triggerPresenceUsage_(master, media) {
28797 var mediaGroups = master.mediaGroups || {};
28798 var defaultDemuxed = true;
28799 var audioGroupKeys = Object.keys(mediaGroups.AUDIO);
28800
28801 for (var mediaGroup in mediaGroups.AUDIO) {
28802 for (var label in mediaGroups.AUDIO[mediaGroup]) {
28803 var properties = mediaGroups.AUDIO[mediaGroup][label];
28804
28805 if (!properties.uri) {
28806 defaultDemuxed = false;
28807 }
28808 }
28809 }
28810
28811 if (defaultDemuxed) {
28812 this.tech_.trigger({
28813 type: 'usage',
28814 name: 'vhs-demuxed'
28815 });
28816 this.tech_.trigger({
28817 type: 'usage',
28818 name: 'hls-demuxed'
28819 });
28820 }
28821
28822 if (Object.keys(mediaGroups.SUBTITLES).length) {
28823 this.tech_.trigger({
28824 type: 'usage',
28825 name: 'vhs-webvtt'
28826 });
28827 this.tech_.trigger({
28828 type: 'usage',
28829 name: 'hls-webvtt'
28830 });
28831 }
28832
28833 if (Vhs$1.Playlist.isAes(media)) {
28834 this.tech_.trigger({
28835 type: 'usage',
28836 name: 'vhs-aes'
28837 });
28838 this.tech_.trigger({
28839 type: 'usage',
28840 name: 'hls-aes'
28841 });
28842 }
28843
28844 if (audioGroupKeys.length && Object.keys(mediaGroups.AUDIO[audioGroupKeys[0]]).length > 1) {
28845 this.tech_.trigger({
28846 type: 'usage',
28847 name: 'vhs-alternate-audio'
28848 });
28849 this.tech_.trigger({
28850 type: 'usage',
28851 name: 'hls-alternate-audio'
28852 });
28853 }
28854
28855 if (this.useCueTags_) {
28856 this.tech_.trigger({
28857 type: 'usage',
28858 name: 'vhs-playlist-cue-tags'
28859 });
28860 this.tech_.trigger({
28861 type: 'usage',
28862 name: 'hls-playlist-cue-tags'
28863 });
28864 }
28865 };
28866
28867 _proto.shouldSwitchToMedia_ = function shouldSwitchToMedia_(nextPlaylist) {
28868 var currentPlaylist = this.masterPlaylistLoader_.media() || this.masterPlaylistLoader_.pendingMedia_;
28869 var currentTime = this.tech_.currentTime();
28870 var bufferLowWaterLine = this.bufferLowWaterLine();
28871 var bufferHighWaterLine = this.bufferHighWaterLine();
28872 var buffered = this.tech_.buffered();
28873 return shouldSwitchToMedia({
28874 buffered: buffered,
28875 currentTime: currentTime,
28876 currentPlaylist: currentPlaylist,
28877 nextPlaylist: nextPlaylist,
28878 bufferLowWaterLine: bufferLowWaterLine,
28879 bufferHighWaterLine: bufferHighWaterLine,
28880 duration: this.duration(),
28881 experimentalBufferBasedABR: this.experimentalBufferBasedABR,
28882 log: this.logger_
28883 });
28884 }
28885 /**
28886 * Register event handlers on the segment loaders. A helper function
28887 * for construction time.
28888 *
28889 * @private
28890 */
28891 ;
28892
    /**
     * Register event handlers on the segment loaders. A helper function
     * for construction time. Wires ABR decisions, error propagation,
     * sync/seekable updates, end-of-stream handling, codec/source-buffer
     * management and feature-usage reporting.
     *
     * @private
     */
    _proto.setupSegmentLoaderListeners_ = function setupSegmentLoaderListeners_() {
      var _this4 = this;

      // with the legacy (non buffer-based) ABR, rendition switches are driven
      // by bandwidth measurements reported after each segment download
      if (!this.experimentalBufferBasedABR) {
        this.mainSegmentLoader_.on('bandwidthupdate', function () {
          var nextPlaylist = _this4.selectPlaylist();

          if (_this4.shouldSwitchToMedia_(nextPlaylist)) {
            _this4.switchMedia_(nextPlaylist, 'bandwidthupdate');
          }

          _this4.tech_.trigger('bandwidthupdate');
        });
        this.mainSegmentLoader_.on('progress', function () {
          _this4.trigger('progress');
        });
      }

      // a segment request failure excludes the current playlist for a while
      this.mainSegmentLoader_.on('error', function () {
        _this4.blacklistCurrentPlaylist(_this4.mainSegmentLoader_.error());
      });
      // append errors are fatal for playback: surface them as player errors
      this.mainSegmentLoader_.on('appenderror', function () {
        _this4.error = _this4.mainSegmentLoader_.error_;

        _this4.trigger('error');
      });
      // sync info changes may move the seekable range
      this.mainSegmentLoader_.on('syncinfoupdate', function () {
        _this4.onSyncInfoUpdate_();
      });
      this.mainSegmentLoader_.on('timestampoffset', function () {
        _this4.tech_.trigger({
          type: 'usage',
          name: 'vhs-timestamp-offset'
        });

        _this4.tech_.trigger({
          type: 'usage',
          name: 'hls-timestamp-offset'
        });
      });
      this.audioSegmentLoader_.on('syncinfoupdate', function () {
        _this4.onSyncInfoUpdate_();
      });
      this.audioSegmentLoader_.on('appenderror', function () {
        _this4.error = _this4.audioSegmentLoader_.error_;

        _this4.trigger('error');
      });
      this.mainSegmentLoader_.on('ended', function () {
        _this4.logger_('main segment loader ended');

        _this4.onEndOfStream();
      });
      this.mainSegmentLoader_.on('earlyabort', function (event) {
        // never try to early abort with the new ABR algorithm
        if (_this4.experimentalBufferBasedABR) {
          return;
        }

        _this4.delegateLoaders_('all', ['abort']);

        _this4.blacklistCurrentPlaylist({
          message: 'Aborted early because there isn\'t enough bandwidth to complete the ' + 'request without rebuffering.'
        }, ABORT_EARLY_BLACKLIST_SECONDS);
      });

      // (re)configure source buffers whenever either loader learns what
      // codecs/tracks its content contains
      var updateCodecs = function updateCodecs() {
        if (!_this4.sourceUpdater_.hasCreatedSourceBuffers()) {
          return _this4.tryToCreateSourceBuffers_();
        }

        var codecs = _this4.getCodecsOrExclude_(); // no codecs means that the playlist was excluded


        if (!codecs) {
          return;
        }

        _this4.sourceUpdater_.addOrChangeSourceBuffers(codecs);
      };

      this.mainSegmentLoader_.on('trackinfo', updateCodecs);
      this.audioSegmentLoader_.on('trackinfo', updateCodecs);
      // fmp4 usage is reported at most once per source (triggeredFmp4Usage flag)
      this.mainSegmentLoader_.on('fmp4', function () {
        if (!_this4.triggeredFmp4Usage) {
          _this4.tech_.trigger({
            type: 'usage',
            name: 'vhs-fmp4'
          });

          _this4.tech_.trigger({
            type: 'usage',
            name: 'hls-fmp4'
          });

          _this4.triggeredFmp4Usage = true;
        }
      });
      this.audioSegmentLoader_.on('fmp4', function () {
        if (!_this4.triggeredFmp4Usage) {
          _this4.tech_.trigger({
            type: 'usage',
            name: 'vhs-fmp4'
          });

          _this4.tech_.trigger({
            type: 'usage',
            name: 'hls-fmp4'
          });

          _this4.triggeredFmp4Usage = true;
        }
      });
      this.audioSegmentLoader_.on('ended', function () {
        _this4.logger_('audioSegmentLoader ended');

        _this4.onEndOfStream();
      });
    };
29012
29013 _proto.mediaSecondsLoaded_ = function mediaSecondsLoaded_() {
29014 return Math.max(this.audioSegmentLoader_.mediaSecondsLoaded + this.mainSegmentLoader_.mediaSecondsLoaded);
29015 }
29016 /**
29017 * Call load on our SegmentLoaders
29018 */
29019 ;
29020
29021 _proto.load = function load() {
29022 this.mainSegmentLoader_.load();
29023
29024 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
29025 this.audioSegmentLoader_.load();
29026 }
29027
29028 if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
29029 this.subtitleSegmentLoader_.load();
29030 }
29031 }
29032 /**
29033 * Re-tune playback quality level for the current player
29034 * conditions without performing destructive actions, like
29035 * removing already buffered content
29036 *
29037 * @private
29038 * @deprecated
29039 */
29040 ;
29041
29042 _proto.smoothQualityChange_ = function smoothQualityChange_(media) {
29043 if (media === void 0) {
29044 media = this.selectPlaylist();
29045 }
29046
29047 this.fastQualityChange_(media);
29048 }
29049 /**
29050 * Re-tune playback quality level for the current player
29051 * conditions. This method will perform destructive actions like removing
29052 * already buffered content in order to readjust the currently active
29053 * playlist quickly. This is good for manual quality changes
29054 *
29055 * @private
29056 */
29057 ;
29058
    /**
     * Switch to the given (or best-selected) playlist, destructively clearing
     * the buffer so the new rendition is visible immediately.
     *
     * @param {Object=} media playlist to switch to; defaults to selectPlaylist()
     * @private
     */
    _proto.fastQualityChange_ = function fastQualityChange_(media) {
      var _this5 = this;

      // default to the best playlist for current conditions
      if (media === void 0) {
        media = this.selectPlaylist();
      }

      // nothing to do when asked to switch to the already-active playlist
      if (media === this.masterPlaylistLoader_.media()) {
        this.logger_('skipping fastQualityChange because new media is same as old');
        return;
      }

      this.switchMedia_(media, 'fast-quality'); // Delete all buffered data to allow an immediate quality switch, then seek to give
      // the browser a kick to remove any cached frames from the previous rendition (.04 seconds
      // ahead is roughly the minimum that will accomplish this across a variety of content
      // in IE and Edge, but seeking in place is sufficient on all other browsers)
      // Edge/IE bug: https://developer.microsoft.com/en-us/microsoft-edge/platform/issues/14600375/
      // Chrome bug: https://bugs.chromium.org/p/chromium/issues/detail?id=651904

      this.mainSegmentLoader_.resetEverything(function () {
        // Since this is not a typical seek, we avoid the seekTo method which can cause segments
        // from the previously enabled rendition to load before the new playlist has finished loading
        if (videojs__default["default"].browser.IE_VERSION || videojs__default["default"].browser.IS_EDGE) {
          _this5.tech_.setCurrentTime(_this5.tech_.currentTime() + 0.04);
        } else {
          _this5.tech_.setCurrentTime(_this5.tech_.currentTime());
        }
      }); // don't need to reset audio as it is reset when media changes
    }
29088 /**
29089 * Begin playback.
29090 */
29091 ;
29092
29093 _proto.play = function play() {
29094 if (this.setupFirstPlay()) {
29095 return;
29096 }
29097
29098 if (this.tech_.ended()) {
29099 this.tech_.setCurrentTime(0);
29100 }
29101
29102 if (this.hasPlayed_) {
29103 this.load();
29104 }
29105
29106 var seekable = this.tech_.seekable(); // if the viewer has paused and we fell out of the live window,
29107 // seek forward to the live point
29108
29109 if (this.tech_.duration() === Infinity) {
29110 if (this.tech_.currentTime() < seekable.start(0)) {
29111 return this.tech_.setCurrentTime(seekable.end(seekable.length - 1));
29112 }
29113 }
29114 }
29115 /**
29116 * Seek to the latest media position if this is a live video and the
29117 * player and video are loaded and initialized.
29118 */
29119 ;
29120
    /**
     * Seek to the latest media position if this is a live video and the
     * player and video are loaded and initialized.
     *
     * @return {boolean} true when first-play setup (load and, for live,
     *         seek-to-live) was performed; false when not ready or deferred
     */
    _proto.setupFirstPlay = function setupFirstPlay() {
      var _this6 = this;

      var media = this.masterPlaylistLoader_.media(); // Check that everything is ready to begin buffering for the first call to play
      // If 1) there is no active media
      // 2) the player is paused
      // 3) the first play has already been setup
      // then exit early

      if (!media || this.tech_.paused() || this.hasPlayed_) {
        return false;
      } // when the video is a live stream


      if (!media.endList) {
        var seekable = this.seekable();

        if (!seekable.length) {
          // without a seekable range, the player cannot seek to begin buffering at the live
          // point
          return false;
        }

        if (videojs__default["default"].browser.IE_VERSION && this.tech_.readyState() === 0) {
          // IE11 throws an InvalidStateError if you try to set currentTime while the
          // readyState is 0, so it must be delayed until the tech fires loadedmetadata.
          this.tech_.one('loadedmetadata', function () {
            _this6.trigger('firstplay');

            _this6.tech_.setCurrentTime(seekable.end(0));

            _this6.hasPlayed_ = true;
          });
          // return false now; the deferred handler above finishes the setup
          return false;
        } // trigger firstplay to inform the source handler to ignore the next seek event


        this.trigger('firstplay'); // seek to the live point

        this.tech_.setCurrentTime(seekable.end(0));
      }

      this.hasPlayed_ = true; // we can begin loading now that everything is ready

      this.load();
      return true;
    }
29168 /**
29169 * handle the sourceopen event on the MediaSource
29170 *
29171 * @private
29172 */
29173 ;
29174
29175 _proto.handleSourceOpen_ = function handleSourceOpen_() {
29176 // Only attempt to create the source buffer if none already exist.
29177 // handleSourceOpen is also called when we are "re-opening" a source buffer
29178 // after `endOfStream` has been called (in response to a seek for instance)
29179 this.tryToCreateSourceBuffers_(); // if autoplay is enabled, begin playback. This is duplicative of
29180 // code in video.js but is required because play() must be invoked
29181 // *after* the media source has opened.
29182
29183 if (this.tech_.autoplay()) {
29184 var playPromise = this.tech_.play(); // Catch/silence error when a pause interrupts a play request
29185 // on browsers which return a promise
29186
29187 if (typeof playPromise !== 'undefined' && typeof playPromise.then === 'function') {
29188 playPromise.then(null, function (e) {});
29189 }
29190 }
29191
29192 this.trigger('sourceopen');
29193 }
29194 /**
29195 * handle the sourceended event on the MediaSource
29196 *
29197 * @private
29198 */
29199 ;
29200
29201 _proto.handleSourceEnded_ = function handleSourceEnded_() {
29202 if (!this.inbandTextTracks_.metadataTrack_) {
29203 return;
29204 }
29205
29206 var cues = this.inbandTextTracks_.metadataTrack_.cues;
29207
29208 if (!cues || !cues.length) {
29209 return;
29210 }
29211
29212 var duration = this.duration();
29213 cues[cues.length - 1].endTime = isNaN(duration) || Math.abs(duration) === Infinity ? Number.MAX_VALUE : duration;
29214 }
29215 /**
29216 * handle the durationchange event on the MediaSource
29217 *
29218 * @private
29219 */
29220 ;
29221
    _proto.handleDurationChange_ = function handleDurationChange_() {
      // re-emit the MediaSource's durationchange on the tech so the player
      // (UI, plugins) sees the updated duration
      this.tech_.trigger('durationchange');
    }
29225 /**
29226 * Calls endOfStream on the media source when all active stream types have called
29227 * endOfStream
29228 *
29229 * @param {string} streamType
29230 * Stream type of the segment loader that called endOfStream
29231 * @private
29232 */
29233 ;
29234
29235 _proto.onEndOfStream = function onEndOfStream() {
29236 var isEndOfStream = this.mainSegmentLoader_.ended_;
29237
29238 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
29239 var mainMediaInfo = this.mainSegmentLoader_.getCurrentMediaInfo_(); // if the audio playlist loader exists, then alternate audio is active
29240
29241 if (!mainMediaInfo || mainMediaInfo.hasVideo) {
29242 // if we do not know if the main segment loader contains video yet or if we
29243 // definitively know the main segment loader contains video, then we need to wait
29244 // for both main and audio segment loaders to call endOfStream
29245 isEndOfStream = isEndOfStream && this.audioSegmentLoader_.ended_;
29246 } else {
29247 // otherwise just rely on the audio loader
29248 isEndOfStream = this.audioSegmentLoader_.ended_;
29249 }
29250 }
29251
29252 if (!isEndOfStream) {
29253 return;
29254 }
29255
29256 this.stopABRTimer_();
29257 this.sourceUpdater_.endOfStream();
29258 }
29259 /**
29260 * Check if a playlist has stopped being updated
29261 *
29262 * @param {Object} playlist the media playlist object
29263 * @return {boolean} whether the playlist has stopped being updated or not
29264 */
29265 ;
29266
29267 _proto.stuckAtPlaylistEnd_ = function stuckAtPlaylistEnd_(playlist) {
29268 var seekable = this.seekable();
29269
29270 if (!seekable.length) {
29271 // playlist doesn't have enough information to determine whether we are stuck
29272 return false;
29273 }
29274
29275 var expired = this.syncController_.getExpiredTime(playlist, this.duration());
29276
29277 if (expired === null) {
29278 return false;
29279 } // does not use the safe live end to calculate playlist end, since we
29280 // don't want to say we are stuck while there is still content
29281
29282
29283 var absolutePlaylistEnd = Vhs$1.Playlist.playlistEnd(playlist, expired);
29284 var currentTime = this.tech_.currentTime();
29285 var buffered = this.tech_.buffered();
29286
29287 if (!buffered.length) {
29288 // return true if the playhead reached the absolute end of the playlist
29289 return absolutePlaylistEnd - currentTime <= SAFE_TIME_DELTA;
29290 }
29291
29292 var bufferedEnd = buffered.end(buffered.length - 1); // return true if there is too little buffer left and buffer has reached absolute
29293 // end of playlist
29294
29295 return bufferedEnd - currentTime <= SAFE_TIME_DELTA && absolutePlaylistEnd - bufferedEnd <= SAFE_TIME_DELTA;
29296 }
29297 /**
29298 * Blacklists a playlist when an error occurs for a set amount of time
29299 * making it unavailable for selection by the rendition selection algorithm
29300 * and then forces a new playlist (rendition) selection.
29301 *
29302 * @param {Object=} error an optional error that may include the playlist
29303 * to blacklist
29304 * @param {number=} blacklistDuration an optional number of seconds to blacklist the
29305 * playlist
29306 */
29307 ;
29308
    /**
     * Blacklist (exclude) the failing playlist for a period of time and switch
     * to a newly selected playlist, with special handling for the cases where
     * there is no playlist, only one playlist, or this is the final enabled
     * rendition.
     *
     * @param {Object=} error optional error, possibly carrying the playlist to
     *        blacklist, a reason, and a blacklistDuration
     * @param {number=} blacklistDuration optional seconds to blacklist for
     */
    _proto.blacklistCurrentPlaylist = function blacklistCurrentPlaylist(error, blacklistDuration) {
      if (error === void 0) {
        error = {};
      }

      // If the `error` was generated by the playlist loader, it will contain
      // the playlist we were trying to load (but failed) and that should be
      // blacklisted instead of the currently selected playlist which is likely
      // out-of-date in this scenario
      var currentPlaylist = error.playlist || this.masterPlaylistLoader_.media();
      blacklistDuration = blacklistDuration || error.blacklistDuration || this.blacklistDuration; // If there is no current playlist, then an error occurred while we were
      // trying to load the master OR while we were disposing of the tech

      if (!currentPlaylist) {
        this.error = error;

        // if the media source is open, end the stream with a network error
        // instead of surfacing a player error
        if (this.mediaSource.readyState !== 'open') {
          this.trigger('error');
        } else {
          this.sourceUpdater_.endOfStream('network');
        }

        return;
      }

      // count consecutive errors for this playlist; used below to decide on a
      // permanent (Infinity) exclusion
      currentPlaylist.playlistErrors_++;
      var playlists = this.masterPlaylistLoader_.master.playlists;
      var enabledPlaylists = playlists.filter(isEnabled);
      var isFinalRendition = enabledPlaylists.length === 1 && enabledPlaylists[0] === currentPlaylist; // Don't blacklist the only playlist unless it was blacklisted
      // forever

      if (playlists.length === 1 && blacklistDuration !== Infinity) {
        videojs__default["default"].log.warn("Problem encountered with playlist " + currentPlaylist.id + ". " + 'Trying again since it is the only playlist.');
        this.tech_.trigger('retryplaylist'); // if this is a final rendition, we should delay

        return this.masterPlaylistLoader_.load(isFinalRendition);
      }

      if (isFinalRendition) {
        // Since we're on the final non-blacklisted playlist, and we're about to blacklist
        // it, instead of erring the player or retrying this playlist, clear out the current
        // blacklist. This allows other playlists to be attempted in case any have been
        // fixed.
        var reincluded = false;
        playlists.forEach(function (playlist) {
          // skip current playlist which is about to be blacklisted
          if (playlist === currentPlaylist) {
            return;
          }

          var excludeUntil = playlist.excludeUntil; // a playlist cannot be reincluded if it wasn't excluded to begin with.

          if (typeof excludeUntil !== 'undefined' && excludeUntil !== Infinity) {
            reincluded = true;
            delete playlist.excludeUntil;
          }
        });

        if (reincluded) {
          videojs__default["default"].log.warn('Removing other playlists from the exclusion list because the last ' + 'rendition is about to be excluded.'); // Technically we are retrying a playlist, in that we are simply retrying a previous
          // playlist. This is needed for users relying on the retryplaylist event to catch a
          // case where the player might be stuck and looping through "dead" playlists.

          this.tech_.trigger('retryplaylist');
        }
      } // Blacklist this playlist


      // exclude forever after exceeding the retry budget; otherwise exclude
      // for blacklistDuration seconds from now
      var excludeUntil;

      if (currentPlaylist.playlistErrors_ > this.maxPlaylistRetries) {
        excludeUntil = Infinity;
      } else {
        excludeUntil = Date.now() + blacklistDuration * 1000;
      }

      currentPlaylist.excludeUntil = excludeUntil;

      if (error.reason) {
        currentPlaylist.lastExcludeReason_ = error.reason;
      }

      this.tech_.trigger('blacklistplaylist');
      this.tech_.trigger({
        type: 'usage',
        name: 'vhs-rendition-blacklisted'
      });
      this.tech_.trigger({
        type: 'usage',
        name: 'hls-rendition-blacklisted'
      }); // TODO: should we select a new playlist if this blacklist wasn't for the currentPlaylist?
      // Would be something like media().id !=== currentPlaylist.id and we would need something
      // like `pendingMedia` in playlist loaders to check against that too. This will prevent us
      // from loading a new playlist on any blacklist.
      // Select a new playlist

      var nextPlaylist = this.selectPlaylist();

      if (!nextPlaylist) {
        this.error = 'Playback cannot continue. No available working or supported playlists.';
        this.trigger('error');
        return;
      }

      // internal errors are logged quietly; external problems get a warning
      var logFn = error.internal ? this.logger_ : videojs__default["default"].log.warn;
      var errorMessage = error.message ? ' ' + error.message : '';
      logFn((error.internal ? 'Internal problem' : 'Problem') + " encountered with playlist " + currentPlaylist.id + "." + (errorMessage + " Switching to playlist " + nextPlaylist.id + ".")); // if audio group changed reset audio loaders

      if (nextPlaylist.attributes.AUDIO !== currentPlaylist.attributes.AUDIO) {
        this.delegateLoaders_('audio', ['abort', 'pause']);
      } // if subtitle group changed reset subtitle loaders


      if (nextPlaylist.attributes.SUBTITLES !== currentPlaylist.attributes.SUBTITLES) {
        this.delegateLoaders_('subtitle', ['abort', 'pause']);
      }

      this.delegateLoaders_('main', ['abort', 'pause']);
      var delayDuration = nextPlaylist.targetDuration / 2 * 1000 || 5 * 1000;
      var shouldDelay = typeof nextPlaylist.lastRequest === 'number' && Date.now() - nextPlaylist.lastRequest <= delayDuration; // delay if it's a final rendition or if the last refresh is sooner than half targetDuration

      return this.switchMedia_(nextPlaylist, 'exclude', isFinalRendition || shouldDelay);
    }
29432 /**
29433 * Pause all segment/playlist loaders
29434 */
29435 ;
29436
    _proto.pauseLoading = function pauseLoading() {
      // abort in-flight requests and pause every playlist and segment loader
      this.delegateLoaders_('all', ['abort', 'pause']);
      // stop the adaptive-bitrate selection timer while loading is paused
      this.stopABRTimer_();
    }
29441 /**
29442 * Call a set of functions in order on playlist loaders, segment loaders,
29443 * or both types of loaders.
29444 *
29445 * @param {string} filter
29446 * Filter loaders that should call fnNames using a string. Can be:
29447 * * all - run on all loaders
29448 * * audio - run on all audio loaders
29449 * * subtitle - run on all subtitle loaders
29450 * * main - run on the main/master loaders
29451 *
29452 * @param {Array|string} fnNames
29453 * A string or array of function names to call.
29454 */
29455 ;
29456
29457 _proto.delegateLoaders_ = function delegateLoaders_(filter, fnNames) {
29458 var _this7 = this;
29459
29460 var loaders = [];
29461 var dontFilterPlaylist = filter === 'all';
29462
29463 if (dontFilterPlaylist || filter === 'main') {
29464 loaders.push(this.masterPlaylistLoader_);
29465 }
29466
29467 var mediaTypes = [];
29468
29469 if (dontFilterPlaylist || filter === 'audio') {
29470 mediaTypes.push('AUDIO');
29471 }
29472
29473 if (dontFilterPlaylist || filter === 'subtitle') {
29474 mediaTypes.push('CLOSED-CAPTIONS');
29475 mediaTypes.push('SUBTITLES');
29476 }
29477
29478 mediaTypes.forEach(function (mediaType) {
29479 var loader = _this7.mediaTypes_[mediaType] && _this7.mediaTypes_[mediaType].activePlaylistLoader;
29480
29481 if (loader) {
29482 loaders.push(loader);
29483 }
29484 });
29485 ['main', 'audio', 'subtitle'].forEach(function (name) {
29486 var loader = _this7[name + "SegmentLoader_"];
29487
29488 if (loader && (filter === name || filter === 'all')) {
29489 loaders.push(loader);
29490 }
29491 });
29492 loaders.forEach(function (loader) {
29493 return fnNames.forEach(function (fnName) {
29494 if (typeof loader[fnName] === 'function') {
29495 loader[fnName]();
29496 }
29497 });
29498 });
29499 }
29500 /**
29501 * set the current time on all segment loaders
29502 *
29503 * @param {TimeRange} currentTime the current time to set
29504 * @return {TimeRange} the current time
29505 */
29506 ;
29507
29508 _proto.setCurrentTime = function setCurrentTime(currentTime) {
29509 var buffered = findRange(this.tech_.buffered(), currentTime);
29510
29511 if (!(this.masterPlaylistLoader_ && this.masterPlaylistLoader_.media())) {
29512 // return immediately if the metadata is not ready yet
29513 return 0;
29514 } // it's clearly an edge-case but don't thrown an error if asked to
29515 // seek within an empty playlist
29516
29517
29518 if (!this.masterPlaylistLoader_.media().segments) {
29519 return 0;
29520 } // if the seek location is already buffered, continue buffering as usual
29521
29522
29523 if (buffered && buffered.length) {
29524 return currentTime;
29525 } // cancel outstanding requests so we begin buffering at the new
29526 // location
29527
29528
29529 this.mainSegmentLoader_.resetEverything();
29530 this.mainSegmentLoader_.abort();
29531
29532 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
29533 this.audioSegmentLoader_.resetEverything();
29534 this.audioSegmentLoader_.abort();
29535 }
29536
29537 if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
29538 this.subtitleSegmentLoader_.resetEverything();
29539 this.subtitleSegmentLoader_.abort();
29540 } // start segment loader loading in case they are paused
29541
29542
29543 this.load();
29544 }
29545 /**
29546 * get the current duration
29547 *
29548 * @return {TimeRange} the duration
29549 */
29550 ;
29551
29552 _proto.duration = function duration() {
29553 if (!this.masterPlaylistLoader_) {
29554 return 0;
29555 }
29556
29557 var media = this.masterPlaylistLoader_.media();
29558
29559 if (!media) {
29560 // no playlists loaded yet, so can't determine a duration
29561 return 0;
29562 } // Don't rely on the media source for duration in the case of a live playlist since
29563 // setting the native MediaSource's duration to infinity ends up with consequences to
29564 // seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.
29565 //
29566 // This is resolved in the spec by https://github.com/w3c/media-source/pull/92,
29567 // however, few browsers have support for setLiveSeekableRange()
29568 // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
29569 //
29570 // Until a time when the duration of the media source can be set to infinity, and a
29571 // seekable range specified across browsers, just return Infinity.
29572
29573
29574 if (!media.endList) {
29575 return Infinity;
29576 } // Since this is a VOD video, it is safe to rely on the media source's duration (if
29577 // available). If it's not available, fall back to a playlist-calculated estimate.
29578
29579
29580 if (this.mediaSource) {
29581 return this.mediaSource.duration;
29582 }
29583
29584 return Vhs$1.Playlist.duration(media);
29585 }
29586 /**
29587 * check the seekable range
29588 *
29589 * @return {TimeRange} the seekable range
29590 */
29591 ;
29592
    _proto.seekable = function seekable() {
      // return the cached seekable range, which is recomputed by
      // onSyncInfoUpdate_ whenever sync information changes
      return this.seekable_;
    };
29596
    /**
     * Recompute the cached seekable range from the main (and, when alternate
     * audio is active, audio) playlists, triggering 'seekablechanged' on the
     * tech when the range actually changes.
     *
     * @private
     */
    _proto.onSyncInfoUpdate_ = function onSyncInfoUpdate_() {
      var audioSeekable; // TODO check for creation of both source buffers before updating seekable
      //
      // A fix was made to this function where a check for
      // this.sourceUpdater_.hasCreatedSourceBuffers
      // was added to ensure that both source buffers were created before seekable was
      // updated. However, it originally had a bug where it was checking for a true and
      // returning early instead of checking for false. Setting it to check for false to
      // return early though created other issues. A call to play() would check for seekable
      // end without verifying that a seekable range was present. In addition, even checking
      // for that didn't solve some issues, as handleFirstPlay is sometimes worked around
      // due to a media update calling load on the segment loaders, skipping a seek to live,
      // thereby starting live streams at the beginning of the stream rather than at the end.
      //
      // This conditional should be fixed to wait for the creation of two source buffers at
      // the same time as the other sections of code are fixed to properly seek to live and
      // not throw an error due to checking for a seekable end when no seekable range exists.
      //
      // For now, fall back to the older behavior, with the understanding that the seekable
      // range may not be completely correct, leading to a suboptimal initial live point.

      if (!this.masterPlaylistLoader_) {
        return;
      }

      var media = this.masterPlaylistLoader_.media();

      if (!media) {
        return;
      }

      var expired = this.syncController_.getExpiredTime(media, this.duration());

      if (expired === null) {
        // not enough information to update seekable
        return;
      }

      var master = this.masterPlaylistLoader_.master;
      var mainSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(master, media));

      if (mainSeekable.length === 0) {
        return;
      }

      // when alternate audio is active, the overall seekable range must also
      // account for the audio playlist's seekable range
      if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
        media = this.mediaTypes_.AUDIO.activePlaylistLoader.media();
        expired = this.syncController_.getExpiredTime(media, this.duration());

        if (expired === null) {
          return;
        }

        audioSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(master, media));

        if (audioSeekable.length === 0) {
          return;
        }
      }

      // remember the previous range so 'seekablechanged' only fires on change
      var oldEnd;
      var oldStart;

      if (this.seekable_ && this.seekable_.length) {
        oldEnd = this.seekable_.end(0);
        oldStart = this.seekable_.start(0);
      }

      if (!audioSeekable) {
        // seekable has been calculated based on buffering video data so it
        // can be returned directly
        this.seekable_ = mainSeekable;
      } else if (audioSeekable.start(0) > mainSeekable.end(0) || mainSeekable.start(0) > audioSeekable.end(0)) {
        // seekables are pretty far off, rely on main
        this.seekable_ = mainSeekable;
      } else {
        // intersect the two ranges: the later start and the earlier end
        this.seekable_ = videojs__default["default"].createTimeRanges([[audioSeekable.start(0) > mainSeekable.start(0) ? audioSeekable.start(0) : mainSeekable.start(0), audioSeekable.end(0) < mainSeekable.end(0) ? audioSeekable.end(0) : mainSeekable.end(0)]]);
      } // seekable is the same as last time


      if (this.seekable_ && this.seekable_.length) {
        if (this.seekable_.end(0) === oldEnd && this.seekable_.start(0) === oldStart) {
          return;
        }
      }

      this.logger_("seekable updated [" + printableRange(this.seekable_) + "]");
      this.tech_.trigger('seekablechanged');
    }
29686 /**
29687 * Update the player duration
29688 */
29689 ;
29690
    /**
     * Update the native MediaSource duration (deferring until sourceopen when
     * necessary). For live, only grows the duration to cover the seekable end;
     * for VOD, sets it to the playlist duration or buffered end, whichever is
     * larger.
     *
     * @param {boolean} isLive whether the source is a live (non-endList) playlist
     */
    _proto.updateDuration = function updateDuration(isLive) {
      // clear any previously deferred update that is still waiting on sourceopen
      if (this.updateDuration_) {
        this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
        this.updateDuration_ = null;
      }

      // duration can only be set while the media source is open; defer otherwise
      if (this.mediaSource.readyState !== 'open') {
        this.updateDuration_ = this.updateDuration.bind(this, isLive);
        this.mediaSource.addEventListener('sourceopen', this.updateDuration_);
        return;
      }

      if (isLive) {
        var seekable = this.seekable();

        if (!seekable.length) {
          return;
        } // Even in the case of a live playlist, the native MediaSource's duration should not
        // be set to Infinity (even though this would be expected for a live playlist), since
        // setting the native MediaSource's duration to infinity ends up with consequences to
        // seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.
        //
        // This is resolved in the spec by https://github.com/w3c/media-source/pull/92,
        // however, few browsers have support for setLiveSeekableRange()
        // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
        //
        // Until a time when the duration of the media source can be set to infinity, and a
        // seekable range specified across browsers, the duration should be greater than or
        // equal to the last possible seekable value.
        // MediaSource duration starts as NaN
        // It is possible (and probable) that this case will never be reached for many
        // sources, since the MediaSource reports duration as the highest value without
        // accounting for timestamp offset. For example, if the timestamp offset is -100 and
        // we buffered times 0 to 100 with real times of 100 to 200, even though current
        // time will be between 0 and 100, the native media source may report the duration
        // as 200. However, since we report duration separate from the media source (as
        // Infinity), and as long as the native media source duration value is greater than
        // our reported seekable range, seeks will work as expected. The large number as
        // duration for live is actually a strategy used by some players to work around the
        // issue of live seekable ranges cited above.


        if (isNaN(this.mediaSource.duration) || this.mediaSource.duration < seekable.end(seekable.length - 1)) {
          this.sourceUpdater_.setDuration(seekable.end(seekable.length - 1));
        }

        return;
      }

      // VOD: duration is the larger of the playlist-calculated duration and
      // the end of the buffered range
      var buffered = this.tech_.buffered();
      var duration = Vhs$1.Playlist.duration(this.masterPlaylistLoader_.media());

      if (buffered.length > 0) {
        duration = Math.max(duration, buffered.end(buffered.length - 1));
      }

      if (this.mediaSource.duration !== duration) {
        this.sourceUpdater_.setDuration(duration);
      }
    }
29751 /**
29752 * dispose of the MasterPlaylistController and everything
29753 * that it controls
29754 */
29755 ;
29756
  _proto.dispose = function dispose() {
    var _this8 = this;

    // Notify listeners first so they can detach while state is still valid.
    this.trigger('dispose');
    this.decrypter_.terminate();
    this.masterPlaylistLoader_.dispose();
    this.mainSegmentLoader_.dispose();

    // Remove the deferred first-play loader, if one was registered.
    if (this.loadOnPlay_) {
      this.tech_.off('play', this.loadOnPlay_);
    }

    // Dispose every alternate-track playlist loader (audio/subtitle groups).
    ['AUDIO', 'SUBTITLES'].forEach(function (type) {
      var groups = _this8.mediaTypes_[type].groups;

      for (var id in groups) {
        groups[id].forEach(function (group) {
          if (group.playlistLoader) {
            group.playlistLoader.dispose();
          }
        });
      }
    });
    this.audioSegmentLoader_.dispose();
    this.subtitleSegmentLoader_.dispose();
    this.sourceUpdater_.dispose();
    this.timelineChangeController_.dispose();
    this.stopABRTimer_();

    // A pending bound updateDuration may still be waiting on 'sourceopen'.
    if (this.updateDuration_) {
      this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
    }

    // Detach all remaining MediaSource listeners.
    this.mediaSource.removeEventListener('durationchange', this.handleDurationChange_);

    this.mediaSource.removeEventListener('sourceopen', this.handleSourceOpen_);
    this.mediaSource.removeEventListener('sourceended', this.handleSourceEnded_);
    this.off();
  }
29796 /**
29797 * return the master playlist object if we have one
29798 *
29799 * @return {Object} the master playlist object that we parsed
29800 */
29801 ;
29802
29803 _proto.master = function master() {
29804 return this.masterPlaylistLoader_.master;
29805 }
29806 /**
29807 * return the currently selected playlist
29808 *
29809 * @return {Object} the currently selected playlist object that we parsed
29810 */
29811 ;
29812
29813 _proto.media = function media() {
29814 // playlist loader will not return media if it has not been fully loaded
29815 return this.masterPlaylistLoader_.media() || this.initialMedia_;
29816 };
29817
29818 _proto.areMediaTypesKnown_ = function areMediaTypesKnown_() {
29819 var usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader;
29820 var hasMainMediaInfo = !!this.mainSegmentLoader_.getCurrentMediaInfo_(); // if we are not using an audio loader, then we have audio media info
29821 // otherwise check on the segment loader.
29822
29823 var hasAudioMediaInfo = !usingAudioLoader ? true : !!this.audioSegmentLoader_.getCurrentMediaInfo_(); // one or both loaders has not loaded sufficently to get codecs
29824
29825 if (!hasMainMediaInfo || !hasAudioMediaInfo) {
29826 return false;
29827 }
29828
29829 return true;
29830 };
29831
  _proto.getCodecsOrExclude_ = function getCodecsOrExclude_() {
    var _this9 = this;

    // Media info as probed from downloaded segments; empty objects until the
    // loaders have seen enough data to report codecs.
    var media = {
      main: this.mainSegmentLoader_.getCurrentMediaInfo_() || {},
      audio: this.audioSegmentLoader_.getCurrentMediaInfo_() || {}
    }; // set "main" media equal to video

    media.video = media.main;
    // Codecs advertised by the manifest (CODECS attribute); these take
    // priority over probed codecs below.
    var playlistCodecs = codecsForPlaylist(this.master(), this.media());
    var codecs = {};
    var usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader;

    if (media.main.hasVideo) {
      // priority: manifest CODECS > probed codec > default
      codecs.video = playlistCodecs.video || media.main.videoCodec || DEFAULT_VIDEO_CODEC;
    }

    if (media.main.isMuxed) {
      // muxed content: append the audio codec to the video codec string
      codecs.video += "," + (playlistCodecs.audio || media.main.audioCodec || DEFAULT_AUDIO_CODEC);
    }

    if (media.main.hasAudio && !media.main.isMuxed || media.audio.hasAudio || usingAudioLoader) {
      codecs.audio = playlistCodecs.audio || media.main.audioCodec || media.audio.audioCodec || DEFAULT_AUDIO_CODEC; // set audio isFmp4 so we use the correct "supports" function below

      media.audio.isFmp4 = media.main.hasAudio && !media.main.isMuxed ? media.main.isFmp4 : media.audio.isFmp4;
    } // no codecs, no playback.


    if (!codecs.audio && !codecs.video) {
      this.blacklistCurrentPlaylist({
        playlist: this.media(),
        message: 'Could not determine codecs for playlist.',
        blacklistDuration: Infinity
      });
      return;
    } // fmp4 relies on browser support, while ts relies on muxer support


    var supportFunction = function supportFunction(isFmp4, codec) {
      return isFmp4 ? browserSupportsCodec(codec) : muxerSupportsCodec(codec);
    };

    // Collect unsupported codecs keyed by which component ('browser' or
    // 'muxer') failed to support them, for the exclusion message below.
    var unsupportedCodecs = {};
    var unsupportedAudio;
    ['video', 'audio'].forEach(function (type) {
      if (codecs.hasOwnProperty(type) && !supportFunction(media[type].isFmp4, codecs[type])) {
        var supporter = media[type].isFmp4 ? 'browser' : 'muxer';
        unsupportedCodecs[supporter] = unsupportedCodecs[supporter] || [];
        unsupportedCodecs[supporter].push(codecs[type]);

        if (type === 'audio') {
          unsupportedAudio = supporter;
        }
      }
    });

    // When an alternate audio group's codec is unsupported, every variant in
    // that group will fail the same way, so exclude them all at once.
    if (usingAudioLoader && unsupportedAudio && this.media().attributes.AUDIO) {
      var audioGroup = this.media().attributes.AUDIO;
      this.master().playlists.forEach(function (variant) {
        var variantAudioGroup = variant.attributes && variant.attributes.AUDIO;

        if (variantAudioGroup === audioGroup && variant !== _this9.media()) {
          variant.excludeUntil = Infinity;
        }
      });
      this.logger_("excluding audio group " + audioGroup + " as " + unsupportedAudio + " does not support codec(s): \"" + codecs.audio + "\"");
    } // if we have any unsupported codecs blacklist this playlist.


    if (Object.keys(unsupportedCodecs).length) {
      var message = Object.keys(unsupportedCodecs).reduce(function (acc, supporter) {
        if (acc) {
          acc += ', ';
        }

        acc += supporter + " does not support codec(s): \"" + unsupportedCodecs[supporter].join(',') + "\"";
        return acc;
      }, '') + '.';
      this.blacklistCurrentPlaylist({
        playlist: this.media(),
        internal: true,
        message: message,
        blacklistDuration: Infinity
      });
      return;
    } // check if codec switching is happening


    // If source buffers already exist and changeType() is unavailable, a
    // change in the base codec type (e.g. avc1 -> hvc1) cannot be honored.
    if (this.sourceUpdater_.hasCreatedSourceBuffers() && !this.sourceUpdater_.canChangeType()) {
      var switchMessages = [];
      ['video', 'audio'].forEach(function (type) {
        var newCodec = (parseCodecs(_this9.sourceUpdater_.codecs[type] || '')[0] || {}).type;
        var oldCodec = (parseCodecs(codecs[type] || '')[0] || {}).type;

        if (newCodec && oldCodec && newCodec.toLowerCase() !== oldCodec.toLowerCase()) {
          switchMessages.push("\"" + _this9.sourceUpdater_.codecs[type] + "\" -> \"" + codecs[type] + "\"");
        }
      });

      if (switchMessages.length) {
        this.blacklistCurrentPlaylist({
          playlist: this.media(),
          message: "Codec switching not supported: " + switchMessages.join(', ') + ".",
          blacklistDuration: Infinity,
          internal: true
        });
        return;
      }
    } // TODO: when using the muxer shouldn't we just return
    // the codecs that the muxer outputs?


    return codecs;
  }
29946 /**
29947 * Create source buffers and exlude any incompatible renditions.
29948 *
29949 * @private
29950 */
29951 ;
29952
29953 _proto.tryToCreateSourceBuffers_ = function tryToCreateSourceBuffers_() {
29954 // media source is not ready yet or sourceBuffers are already
29955 // created.
29956 if (this.mediaSource.readyState !== 'open' || this.sourceUpdater_.hasCreatedSourceBuffers()) {
29957 return;
29958 }
29959
29960 if (!this.areMediaTypesKnown_()) {
29961 return;
29962 }
29963
29964 var codecs = this.getCodecsOrExclude_(); // no codecs means that the playlist was excluded
29965
29966 if (!codecs) {
29967 return;
29968 }
29969
29970 this.sourceUpdater_.createSourceBuffers(codecs);
29971 var codecString = [codecs.video, codecs.audio].filter(Boolean).join(',');
29972 this.excludeIncompatibleVariants_(codecString);
29973 }
29974 /**
29975 * Excludes playlists with codecs that are unsupported by the muxer and browser.
29976 */
29977 ;
29978
29979 _proto.excludeUnsupportedVariants_ = function excludeUnsupportedVariants_() {
29980 var _this10 = this;
29981
29982 var playlists = this.master().playlists;
29983 var ids = []; // TODO: why don't we have a property to loop through all
29984 // playlist? Why did we ever mix indexes and keys?
29985
29986 Object.keys(playlists).forEach(function (key) {
29987 var variant = playlists[key]; // check if we already processed this playlist.
29988
29989 if (ids.indexOf(variant.id) !== -1) {
29990 return;
29991 }
29992
29993 ids.push(variant.id);
29994 var codecs = codecsForPlaylist(_this10.master, variant);
29995 var unsupported = [];
29996
29997 if (codecs.audio && !muxerSupportsCodec(codecs.audio) && !browserSupportsCodec(codecs.audio)) {
29998 unsupported.push("audio codec " + codecs.audio);
29999 }
30000
30001 if (codecs.video && !muxerSupportsCodec(codecs.video) && !browserSupportsCodec(codecs.video)) {
30002 unsupported.push("video codec " + codecs.video);
30003 }
30004
30005 if (codecs.text && codecs.text === 'stpp.ttml.im1t') {
30006 unsupported.push("text codec " + codecs.text);
30007 }
30008
30009 if (unsupported.length) {
30010 variant.excludeUntil = Infinity;
30011
30012 _this10.logger_("excluding " + variant.id + " for unsupported: " + unsupported.join(', '));
30013 }
30014 });
30015 }
30016 /**
30017 * Blacklist playlists that are known to be codec or
30018 * stream-incompatible with the SourceBuffer configuration. For
30019 * instance, Media Source Extensions would cause the video element to
30020 * stall waiting for video data if you switched from a variant with
30021 * video and audio to an audio-only one.
30022 *
30023 * @param {Object} media a media playlist compatible with the current
30024 * set of SourceBuffers. Variants in the current master playlist that
30025 * do not appear to have compatible codec or stream configurations
30026 * will be excluded from the default playlist selection algorithm
30027 * indefinitely.
30028 * @private
30029 */
30030 ;
30031
  _proto.excludeIncompatibleVariants_ = function excludeIncompatibleVariants_(codecString) {
    var _this11 = this;

    var ids = [];
    var playlists = this.master().playlists;
    // Parse the codec string the source buffers were created with; variants
    // are compared against this baseline.
    var codecs = unwrapCodecList(parseCodecs(codecString));
    var codecCount_ = codecCount(codecs);
    var videoDetails = codecs.video && parseCodecs(codecs.video)[0] || null;
    var audioDetails = codecs.audio && parseCodecs(codecs.audio)[0] || null;
    Object.keys(playlists).forEach(function (key) {
      var variant = playlists[key]; // check if we already processed this playlist.
      // or it if it is already excluded forever.

      if (ids.indexOf(variant.id) !== -1 || variant.excludeUntil === Infinity) {
        return;
      }

      ids.push(variant.id);
      var blacklistReasons = []; // get codecs from the playlist for this variant

      var variantCodecs = codecsForPlaylist(_this11.masterPlaylistLoader_.master, variant);
      var variantCodecCount = codecCount(variantCodecs); // if no codecs are listed, we cannot determine that this
      // variant is incompatible. Wait for mux.js to probe

      if (!variantCodecs.audio && !variantCodecs.video) {
        return;
      } // TODO: we can support this by removing the
      // old media source and creating a new one, but it will take some work.
      // The number of streams cannot change


      if (variantCodecCount !== codecCount_) {
        blacklistReasons.push("codec count \"" + variantCodecCount + "\" !== \"" + codecCount_ + "\"");
      } // only exclude playlists by codec change, if codecs cannot switch
      // during playback.


      if (!_this11.sourceUpdater_.canChangeType()) {
        var variantVideoDetails = variantCodecs.video && parseCodecs(variantCodecs.video)[0] || null;
        var variantAudioDetails = variantCodecs.audio && parseCodecs(variantCodecs.audio)[0] || null; // the video codec cannot change

        if (variantVideoDetails && videoDetails && variantVideoDetails.type.toLowerCase() !== videoDetails.type.toLowerCase()) {
          blacklistReasons.push("video codec \"" + variantVideoDetails.type + "\" !== \"" + videoDetails.type + "\"");
        } // the audio codec cannot change


        if (variantAudioDetails && audioDetails && variantAudioDetails.type.toLowerCase() !== audioDetails.type.toLowerCase()) {
          blacklistReasons.push("audio codec \"" + variantAudioDetails.type + "\" !== \"" + audioDetails.type + "\"");
        }
      }

      if (blacklistReasons.length) {
        // exclude permanently: the incompatibility will not resolve over time
        variant.excludeUntil = Infinity;

        _this11.logger_("blacklisting " + variant.id + ": " + blacklistReasons.join(' && '));
      }
    });
  };
30090
30091 _proto.updateAdCues_ = function updateAdCues_(media) {
30092 var offset = 0;
30093 var seekable = this.seekable();
30094
30095 if (seekable.length) {
30096 offset = seekable.start(0);
30097 }
30098
30099 updateAdCues(media, this.cueTagsTrack_, offset);
30100 }
30101 /**
30102 * Calculates the desired forward buffer length based on current time
30103 *
30104 * @return {number} Desired forward buffer length in seconds
30105 */
30106 ;
30107
30108 _proto.goalBufferLength = function goalBufferLength() {
30109 var currentTime = this.tech_.currentTime();
30110 var initial = Config.GOAL_BUFFER_LENGTH;
30111 var rate = Config.GOAL_BUFFER_LENGTH_RATE;
30112 var max = Math.max(initial, Config.MAX_GOAL_BUFFER_LENGTH);
30113 return Math.min(initial + currentTime * rate, max);
30114 }
30115 /**
30116 * Calculates the desired buffer low water line based on current time
30117 *
30118 * @return {number} Desired buffer low water line in seconds
30119 */
30120 ;
30121
30122 _proto.bufferLowWaterLine = function bufferLowWaterLine() {
30123 var currentTime = this.tech_.currentTime();
30124 var initial = Config.BUFFER_LOW_WATER_LINE;
30125 var rate = Config.BUFFER_LOW_WATER_LINE_RATE;
30126 var max = Math.max(initial, Config.MAX_BUFFER_LOW_WATER_LINE);
30127 var newMax = Math.max(initial, Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE);
30128 return Math.min(initial + currentTime * rate, this.experimentalBufferBasedABR ? newMax : max);
30129 };
30130
  // Returns the configured buffer high water line (seconds); unlike the low
  // water line, this does not scale with current time.
  _proto.bufferHighWaterLine = function bufferHighWaterLine() {
    return Config.BUFFER_HIGH_WATER_LINE;
  };
30134
30135 return MasterPlaylistController;
30136 }(videojs__default["default"].EventTarget);
30137
30138 /**
30139 * Returns a function that acts as the Enable/disable playlist function.
30140 *
30141 * @param {PlaylistLoader} loader - The master playlist loader
30142 * @param {string} playlistID - id of the playlist
30143 * @param {Function} changePlaylistFn - A function to be called after a
30144 * playlist's enabled-state has been changed. Will NOT be called if a
30145 * playlist's enabled-state is unchanged
30146 * @param {boolean=} enable - Value to set the playlist enabled-state to
30147 * or if undefined returns the current enabled-state for the playlist
30148 * @return {Function} Function for setting/getting enabled
30149 */
30150
30151 var enableFunction = function enableFunction(loader, playlistID, changePlaylistFn) {
30152 return function (enable) {
30153 var playlist = loader.master.playlists[playlistID];
30154 var incompatible = isIncompatible(playlist);
30155 var currentlyEnabled = isEnabled(playlist);
30156
30157 if (typeof enable === 'undefined') {
30158 return currentlyEnabled;
30159 }
30160
30161 if (enable) {
30162 delete playlist.disabled;
30163 } else {
30164 playlist.disabled = true;
30165 }
30166
30167 if (enable !== currentlyEnabled && !incompatible) {
30168 // Ensure the outside world knows about our changes
30169 changePlaylistFn();
30170
30171 if (enable) {
30172 loader.trigger('renditionenabled');
30173 } else {
30174 loader.trigger('renditiondisabled');
30175 }
30176 }
30177
30178 return enable;
30179 };
30180 };
30181 /**
30182 * The representation object encapsulates the publicly visible information
30183 * in a media playlist along with a setter/getter-type function (enabled)
30184 * for changing the enabled-state of a particular playlist entry
30185 *
30186 * @class Representation
30187 */
30188
30189
30190 var Representation = function Representation(vhsHandler, playlist, id) {
30191 var mpc = vhsHandler.masterPlaylistController_,
30192 smoothQualityChange = vhsHandler.options_.smoothQualityChange; // Get a reference to a bound version of the quality change function
30193
30194 var changeType = smoothQualityChange ? 'smooth' : 'fast';
30195 var qualityChangeFunction = mpc[changeType + "QualityChange_"].bind(mpc); // some playlist attributes are optional
30196
30197 if (playlist.attributes) {
30198 var resolution = playlist.attributes.RESOLUTION;
30199 this.width = resolution && resolution.width;
30200 this.height = resolution && resolution.height;
30201 this.bandwidth = playlist.attributes.BANDWIDTH;
30202 }
30203
30204 this.codecs = codecsForPlaylist(mpc.master(), playlist);
30205 this.playlist = playlist; // The id is simply the ordinality of the media playlist
30206 // within the master playlist
30207
30208 this.id = id; // Partially-apply the enableFunction to create a playlist-
30209 // specific variant
30210
30211 this.enabled = enableFunction(vhsHandler.playlists, playlist.id, qualityChangeFunction);
30212 };
30213 /**
30214 * A mixin function that adds the `representations` api to an instance
30215 * of the VhsHandler class
30216 *
30217 * @param {VhsHandler} vhsHandler - An instance of VhsHandler to add the
30218 * representation API into
30219 */
30220
30221
30222 var renditionSelectionMixin = function renditionSelectionMixin(vhsHandler) {
30223 // Add a single API-specific function to the VhsHandler instance
30224 vhsHandler.representations = function () {
30225 var master = vhsHandler.masterPlaylistController_.master();
30226 var playlists = isAudioOnly(master) ? vhsHandler.masterPlaylistController_.getAudioTrackPlaylists_() : master.playlists;
30227
30228 if (!playlists) {
30229 return [];
30230 }
30231
30232 return playlists.filter(function (media) {
30233 return !isIncompatible(media);
30234 }).map(function (e, i) {
30235 return new Representation(vhsHandler, e, e.id);
30236 });
30237 };
30238 };
30239
30240 /**
30241 * @file playback-watcher.js
30242 *
30243 * Playback starts, and now my watch begins. It shall not end until my death. I shall
30244 * take no wait, hold no uncleared timeouts, father no bad seeks. I shall wear no crowns
30245 * and win no glory. I shall live and die at my post. I am the corrector of the underflow.
30246 * I am the watcher of gaps. I am the shield that guards the realms of seekable. I pledge
30247 * my life and honor to the Playback Watch, for this Player and all the Players to come.
30248 */
30249
  // Tech events that invalidate a pending stall-detection timer.
  var timerCancelEvents = ['seeking', 'seeked', 'pause', 'playing', 'error'];
30251 /**
30252 * @class PlaybackWatcher
30253 */
30254
30255 var PlaybackWatcher = /*#__PURE__*/function () {
30256 /**
30257 * Represents an PlaybackWatcher object.
30258 *
30259 * @class
30260 * @param {Object} options an object that includes the tech and settings
30261 */
  function PlaybackWatcher(options) {
    var _this = this;

    this.masterPlaylistController_ = options.masterPlaylistController;
    this.tech_ = options.tech;
    this.seekable = options.seekable;
    this.allowSeeksWithinUnsafeLiveWindow = options.allowSeeksWithinUnsafeLiveWindow;
    this.liveRangeSafeTimeDelta = options.liveRangeSafeTimeDelta;
    this.media = options.media;
    // stall-detection state, maintained by checkCurrentTime_/cancelTimer_
    this.consecutiveUpdates = 0;
    this.lastRecordedTime = null;
    this.timer_ = null;
    this.checkCurrentTimeTimeout_ = null;
    this.logger_ = logger('PlaybackWatcher');
    this.logger_('initialize');

    var playHandler = function playHandler() {
      return _this.monitorCurrentTime_();
    };

    var canPlayHandler = function canPlayHandler() {
      return _this.monitorCurrentTime_();
    };

    var waitingHandler = function waitingHandler() {
      return _this.techWaiting_();
    };

    var cancelTimerHandler = function cancelTimerHandler() {
      return _this.cancelTimer_();
    };

    var mpc = this.masterPlaylistController_;
    var loaderTypes = ['main', 'subtitle', 'audio'];
    // per-loader-type handlers for stalled-download bookkeeping
    var loaderChecks = {};
    loaderTypes.forEach(function (type) {
      loaderChecks[type] = {
        reset: function reset() {
          return _this.resetSegmentDownloads_(type);
        },
        updateend: function updateend() {
          return _this.checkSegmentDownloads_(type);
        }
      };
      mpc[type + "SegmentLoader_"].on('appendsdone', loaderChecks[type].updateend); // If a rendition switch happens during a playback stall where the buffer
      // isn't changing we want to reset. We cannot assume that the new rendition
      // will also be stalled, until after new appends.

      mpc[type + "SegmentLoader_"].on('playlistupdate', loaderChecks[type].reset); // Playback stalls should not be detected right after seeking.
      // This prevents one segment playlists (single vtt or single segment content)
      // from being detected as stalling. As the buffer will not change in those cases, since
      // the buffer is the entire video duration.

      _this.tech_.on(['seeked', 'seeking'], loaderChecks[type].reset);
    });
    /**
     * We check if a seek was into a gap through the following steps:
     * 1. We get a seeking event and we do not get a seeked event. This means that
     *    a seek was attempted but not completed.
     * 2. We run `fixesBadSeeks_` on segment loader appends. This means that we already
     *    removed everything from our buffer and appended a segment, and should be ready
     *    to check for gaps.
     */

    var setSeekingHandlers = function setSeekingHandlers(fn) {
      // fn is 'on' or 'off' — attach or detach the append check on both loaders
      ['main', 'audio'].forEach(function (type) {
        mpc[type + "SegmentLoader_"][fn]('appended', _this.seekingAppendCheck_);
      });
    };

    this.seekingAppendCheck_ = function () {
      if (_this.fixesBadSeeks_()) {
        // the bad seek was corrected; reset stall tracking and stop checking
        _this.consecutiveUpdates = 0;
        _this.lastRecordedTime = _this.tech_.currentTime();
        setSeekingHandlers('off');
      }
    };

    this.clearSeekingAppendCheck_ = function () {
      return setSeekingHandlers('off');
    };

    this.watchForBadSeeking_ = function () {
      _this.clearSeekingAppendCheck_();

      setSeekingHandlers('on');
    };

    this.tech_.on('seeked', this.clearSeekingAppendCheck_);
    this.tech_.on('seeking', this.watchForBadSeeking_);
    this.tech_.on('waiting', waitingHandler);
    this.tech_.on(timerCancelEvents, cancelTimerHandler);
    this.tech_.on('canplay', canPlayHandler);
    /*
      An edge case exists that results in gaps not being skipped when they exist at the beginning of a stream. This case
      is surfaced in one of two ways:
      1) The `waiting` event is fired before the player has buffered content, making it impossible
         to find or skip the gap. The `waiting` event is followed by a `play` event. On first play
         we can check if playback is stalled due to a gap, and skip the gap if necessary.
      2) A source with a gap at the beginning of the stream is loaded programatically while the player
         is in a playing state. To catch this case, it's important that our one-time play listener is setup
         even if the player is in a playing state
    */

    this.tech_.one('play', playHandler); // Define the dispose function to clean up our events

    this.dispose = function () {
      _this.clearSeekingAppendCheck_();

      _this.logger_('dispose');

      _this.tech_.off('waiting', waitingHandler);

      _this.tech_.off(timerCancelEvents, cancelTimerHandler);

      _this.tech_.off('canplay', canPlayHandler);

      _this.tech_.off('play', playHandler);

      _this.tech_.off('seeking', _this.watchForBadSeeking_);

      _this.tech_.off('seeked', _this.clearSeekingAppendCheck_);

      // detach every per-loader-type listener registered above
      loaderTypes.forEach(function (type) {
        mpc[type + "SegmentLoader_"].off('appendsdone', loaderChecks[type].updateend);
        mpc[type + "SegmentLoader_"].off('playlistupdate', loaderChecks[type].reset);

        _this.tech_.off(['seeked', 'seeking'], loaderChecks[type].reset);
      });

      if (_this.checkCurrentTimeTimeout_) {
        window.clearTimeout(_this.checkCurrentTimeTimeout_);
      }

      _this.cancelTimer_();
    };
  }
30399 /**
30400 * Periodically check current time to see if playback stopped
30401 *
30402 * @private
30403 */
30404
30405
  // Shorthand used by the method definitions below.
  var _proto = PlaybackWatcher.prototype;
30407
30408 _proto.monitorCurrentTime_ = function monitorCurrentTime_() {
30409 this.checkCurrentTime_();
30410
30411 if (this.checkCurrentTimeTimeout_) {
30412 window.clearTimeout(this.checkCurrentTimeTimeout_);
30413 } // 42 = 24 fps // 250 is what Webkit uses // FF uses 15
30414
30415
30416 this.checkCurrentTimeTimeout_ = window.setTimeout(this.monitorCurrentTime_.bind(this), 250);
30417 }
30418 /**
30419 * Reset stalled download stats for a specific type of loader
30420 *
30421 * @param {string} type
30422 * The segment loader type to check.
30423 *
30424 * @listens SegmentLoader#playlistupdate
30425 * @listens Tech#seeking
30426 * @listens Tech#seeked
30427 */
30428 ;
30429
30430 _proto.resetSegmentDownloads_ = function resetSegmentDownloads_(type) {
30431 var loader = this.masterPlaylistController_[type + "SegmentLoader_"];
30432
30433 if (this[type + "StalledDownloads_"] > 0) {
30434 this.logger_("resetting possible stalled download count for " + type + " loader");
30435 }
30436
30437 this[type + "StalledDownloads_"] = 0;
30438 this[type + "Buffered_"] = loader.buffered_();
30439 }
30440 /**
30441 * Checks on every segment `appendsdone` to see
30442 * if segment appends are making progress. If they are not
30443 * and we are still downloading bytes. We blacklist the playlist.
30444 *
30445 * @param {string} type
30446 * The segment loader type to check.
30447 *
30448 * @listens SegmentLoader#appendsdone
30449 */
30450 ;
30451
  _proto.checkSegmentDownloads_ = function checkSegmentDownloads_(type) {
    var mpc = this.masterPlaylistController_;
    var loader = mpc[type + "SegmentLoader_"];
    var buffered = loader.buffered_();
    // compare the buffer against the snapshot taken on the previous check
    var isBufferedDifferent = isRangeDifferent(this[type + "Buffered_"], buffered);
    this[type + "Buffered_"] = buffered; // if another watcher is going to fix the issue or
    // the buffered value for this loader changed
    // appends are working

    if (isBufferedDifferent) {
      this.resetSegmentDownloads_(type);
      return;
    }

    // buffer did not move despite a completed append: count it as a possible stall
    this[type + "StalledDownloads_"]++;
    this.logger_("found #" + this[type + "StalledDownloads_"] + " " + type + " appends that did not increase buffer (possible stalled download)", {
      playlistId: loader.playlist_ && loader.playlist_.id,
      buffered: timeRangesToArray(buffered)
    }); // after 10 possibly stalled appends with no reset, exclude

    if (this[type + "StalledDownloads_"] < 10) {
      return;
    }

    this.logger_(type + " loader stalled download exclusion");
    this.resetSegmentDownloads_(type);
    this.tech_.trigger({
      type: 'usage',
      name: "vhs-" + type + "-download-exclusion"
    });

    // subtitle loaders have no playlist to exclude
    if (type === 'subtitle') {
      return;
    } // TODO: should we exclude audio tracks rather than main tracks
    // when type is audio?


    mpc.blacklistCurrentPlaylist({
      message: "Excessive " + type + " segment downloading detected."
    }, Infinity);
  }
30493 /**
30494 * The purpose of this function is to emulate the "waiting" event on
30495 * browsers that do not emit it when they are waiting for more
30496 * data to continue playback
30497 *
30498 * @private
30499 */
30500 ;
30501
  _proto.checkCurrentTime_ = function checkCurrentTime_() {
    // nothing to monitor while paused or mid-seek
    if (this.tech_.paused() || this.tech_.seeking()) {
      return;
    }

    var currentTime = this.tech_.currentTime();
    var buffered = this.tech_.buffered();

    if (this.lastRecordedTime === currentTime && (!buffered.length || currentTime + SAFE_TIME_DELTA >= buffered.end(buffered.length - 1))) {
      // If current time is at the end of the final buffered region, then any playback
      // stall is most likely caused by buffering in a low bandwidth environment. The tech
      // should fire a `waiting` event in this scenario, but due to browser and tech
      // inconsistencies. Calling `techWaiting_` here allows us to simulate
      // responding to a native `waiting` event when the tech fails to emit one.
      return this.techWaiting_();
    }

    if (this.consecutiveUpdates >= 5 && currentTime === this.lastRecordedTime) {
      // time frozen across 5+ consecutive checks mid-buffer: treat as a stall
      this.consecutiveUpdates++;
      this.waiting_();
    } else if (currentTime === this.lastRecordedTime) {
      this.consecutiveUpdates++;
    } else {
      // playback progressed; reset stall tracking
      this.consecutiveUpdates = 0;
      this.lastRecordedTime = currentTime;
    }
  }
30529 /**
30530 * Cancels any pending timers and resets the 'timeupdate' mechanism
30531 * designed to detect that we are stalled
30532 *
30533 * @private
30534 */
30535 ;
30536
30537 _proto.cancelTimer_ = function cancelTimer_() {
30538 this.consecutiveUpdates = 0;
30539
30540 if (this.timer_) {
30541 this.logger_('cancelTimer_');
30542 clearTimeout(this.timer_);
30543 }
30544
30545 this.timer_ = null;
30546 }
30547 /**
30548 * Fixes situations where there's a bad seek
30549 *
30550 * @return {boolean} whether an action was taken to fix the seek
30551 * @private
30552 */
30553 ;
30554
  _proto.fixesBadSeeks_ = function fixesBadSeeks_() {
    var seeking = this.tech_.seeking();

    // only relevant while a seek is in progress
    if (!seeking) {
      return false;
    } // TODO: It's possible that these seekable checks should be moved out of this function
    // and into a function that runs on seekablechange. It's also possible that we only need
    // afterSeekableWindow as the buffered check at the bottom is good enough to handle before
    // seekable range.


    var seekable = this.seekable();
    var currentTime = this.tech_.currentTime();
    var isAfterSeekableRange = this.afterSeekableWindow_(seekable, currentTime, this.media(), this.allowSeeksWithinUnsafeLiveWindow);
    var seekTo;

    if (isAfterSeekableRange) {
      var seekableEnd = seekable.end(seekable.length - 1); // sync to live point (if VOD, our seekable was updated and we're simply adjusting)

      seekTo = seekableEnd;
    }

    // note: this check can override the one above — a time both after and
    // before the window (degenerate seekable) ends up snapping to the start
    if (this.beforeSeekableWindow_(seekable, currentTime)) {
      var seekableStart = seekable.start(0); // sync to the beginning of the live window
      // provide a buffer of .1 seconds to handle rounding/imprecise numbers

      seekTo = seekableStart + ( // if the playlist is too short and the seekable range is an exact time (can
      // happen in live with a 3 segment playlist), then don't use a time delta
      seekableStart === seekable.end(0) ? 0 : SAFE_TIME_DELTA);
    }

    if (typeof seekTo !== 'undefined') {
      this.logger_("Trying to seek outside of seekable at time " + currentTime + " with " + ("seekable range " + printableRange(seekable) + ". Seeking to ") + (seekTo + "."));
      this.tech_.setCurrentTime(seekTo);
      return true;
    }

    var sourceUpdater = this.masterPlaylistController_.sourceUpdater_;
    var buffered = this.tech_.buffered();
    // per-type buffers exist only for the media types actually in use
    var audioBuffered = sourceUpdater.audioBuffer ? sourceUpdater.audioBuffered() : null;
    var videoBuffered = sourceUpdater.videoBuffer ? sourceUpdater.videoBuffered() : null;
    var media = this.media(); // verify that at least two segment durations or one part duration have been
    // appended before checking for a gap.

    var minAppendedDuration = media.partTargetDuration ? media.partTargetDuration : (media.targetDuration - TIME_FUDGE_FACTOR) * 2; // verify that at least two segment durations have been
    // appended before checking for a gap.

    var bufferedToCheck = [audioBuffered, videoBuffered];

    for (var i = 0; i < bufferedToCheck.length; i++) {
      // skip null buffered
      if (!bufferedToCheck[i]) {
        continue;
      }

      var timeAhead = timeAheadOf(bufferedToCheck[i], currentTime); // if we are less than two video/audio segment durations or one part
      // duration behind we haven't appended enough to call this a bad seek.

      if (timeAhead < minAppendedDuration) {
        return false;
      }
    }

    var nextRange = findNextRange(buffered, currentTime); // we have appended enough content, but we don't have anything buffered
    // to seek over the gap

    if (nextRange.length === 0) {
      return false;
    }

    // the seek landed just before a buffered region — nudge playback into it
    seekTo = nextRange.start(0) + SAFE_TIME_DELTA;
    this.logger_("Buffered region starts (" + nextRange.start(0) + ") " + (" just beyond seek point (" + currentTime + "). Seeking to " + seekTo + "."));
    this.tech_.setCurrentTime(seekTo);
    return true;
  }
30630 /**
30631 * Handler for situations when we determine the player is waiting.
30632 *
30633 * @private
30634 */
30635 ;
30636
30637 _proto.waiting_ = function waiting_() {
30638 if (this.techWaiting_()) {
30639 return;
30640 } // All tech waiting checks failed. Use last resort correction
30641
30642
30643 var currentTime = this.tech_.currentTime();
30644 var buffered = this.tech_.buffered();
30645 var currentRange = findRange(buffered, currentTime); // Sometimes the player can stall for unknown reasons within a contiguous buffered
30646 // region with no indication that anything is amiss (seen in Firefox). Seeking to
30647 // currentTime is usually enough to kickstart the player. This checks that the player
30648 // is currently within a buffered region before attempting a corrective seek.
30649 // Chrome does not appear to continue `timeupdate` events after a `waiting` event
30650 // until there is ~ 3 seconds of forward buffer available. PlaybackWatcher should also
30651 // make sure there is ~3 seconds of forward buffer before taking any corrective action
30652 // to avoid triggering an `unknownwaiting` event when the network is slow.
30653
30654 if (currentRange.length && currentTime + 3 <= currentRange.end(0)) {
30655 this.cancelTimer_();
30656 this.tech_.setCurrentTime(currentTime);
30657 this.logger_("Stopped at " + currentTime + " while inside a buffered region " + ("[" + currentRange.start(0) + " -> " + currentRange.end(0) + "]. Attempting to resume ") + 'playback by seeking to the current time.'); // unknown waiting corrections may be useful for monitoring QoS
30658
30659 this.tech_.trigger({
30660 type: 'usage',
30661 name: 'vhs-unknown-waiting'
30662 });
30663 this.tech_.trigger({
30664 type: 'usage',
30665 name: 'hls-unknown-waiting'
30666 });
30667 return;
30668 }
30669 }
30670 /**
30671 * Handler for situations when the tech fires a `waiting` event
30672 *
30673 * @return {boolean}
30674 * True if an action (or none) was needed to correct the waiting. False if no
30675 * checks passed
30676 * @private
30677 */
30678 ;
30679
  _proto.techWaiting_ = function techWaiting_() {
    var seekable = this.seekable();
    var currentTime = this.tech_.currentTime();

    if (this.tech_.seeking() || this.timer_ !== null) {
      // Tech is seeking or already waiting on another action, no action needed
      return true;
    }

    // Correction 1: playback fell behind the live window — jump to live point.
    if (this.beforeSeekableWindow_(seekable, currentTime)) {
      var livePoint = seekable.end(seekable.length - 1);
      this.logger_("Fell out of live window at time " + currentTime + ". Seeking to " + ("live point (seekable end) " + livePoint));
      this.cancelTimer_();
      this.tech_.setCurrentTime(livePoint); // live window resyncs may be useful for monitoring QoS

      this.tech_.trigger({
        type: 'usage',
        name: 'vhs-live-resync'
      });
      this.tech_.trigger({
        type: 'usage',
        name: 'hls-live-resync'
      });
      return true;
    }

    // NOTE(review): this reaches the controller via `this.tech_.vhs` while
    // fixesBadSeeks_ uses `this.masterPlaylistController_` directly —
    // presumably the same object; confirm.
    var sourceUpdater = this.tech_.vhs.masterPlaylistController_.sourceUpdater_;
    var buffered = this.tech_.buffered();
    var videoUnderflow = this.videoUnderflow_({
      audioBuffered: sourceUpdater.audioBuffered(),
      videoBuffered: sourceUpdater.videoBuffered(),
      currentTime: currentTime
    });

    // Correction 2: video underflow while audio overplayed the gap.
    if (videoUnderflow) {
      // Even though the video underflowed and was stuck in a gap, the audio overplayed
      // the gap, leading currentTime into a buffered range. Seeking to currentTime
      // allows the video to catch up to the audio position without losing any audio
      // (only suffering ~3 seconds of frozen video and a pause in audio playback).
      this.cancelTimer_();
      this.tech_.setCurrentTime(currentTime); // video underflow may be useful for monitoring QoS

      this.tech_.trigger({
        type: 'usage',
        name: 'vhs-video-underflow'
      });
      this.tech_.trigger({
        type: 'usage',
        name: 'hls-video-underflow'
      });
      return true;
    }

    var nextRange = findNextRange(buffered, currentTime); // check for gap

    // Correction 3: a gap before the next buffered region — schedule a skip
    // for when playback should have reached the gap's far side.
    if (nextRange.length > 0) {
      var difference = nextRange.start(0) - currentTime;
      this.logger_("Stopped at " + currentTime + ", setting timer for " + difference + ", seeking " + ("to " + nextRange.start(0)));
      this.cancelTimer_();
      this.timer_ = setTimeout(this.skipTheGap_.bind(this), difference * 1000, currentTime);
      return true;
    } // All checks failed. Returning false to indicate failure to correct waiting


    return false;
  };
30746
30747 _proto.afterSeekableWindow_ = function afterSeekableWindow_(seekable, currentTime, playlist, allowSeeksWithinUnsafeLiveWindow) {
30748 if (allowSeeksWithinUnsafeLiveWindow === void 0) {
30749 allowSeeksWithinUnsafeLiveWindow = false;
30750 }
30751
30752 if (!seekable.length) {
30753 // we can't make a solid case if there's no seekable, default to false
30754 return false;
30755 }
30756
30757 var allowedEnd = seekable.end(seekable.length - 1) + SAFE_TIME_DELTA;
30758 var isLive = !playlist.endList;
30759
30760 if (isLive && allowSeeksWithinUnsafeLiveWindow) {
30761 allowedEnd = seekable.end(seekable.length - 1) + playlist.targetDuration * 3;
30762 }
30763
30764 if (currentTime > allowedEnd) {
30765 return true;
30766 }
30767
30768 return false;
30769 };
30770
30771 _proto.beforeSeekableWindow_ = function beforeSeekableWindow_(seekable, currentTime) {
30772 if (seekable.length && // can't fall before 0 and 0 seekable start identifies VOD stream
30773 seekable.start(0) > 0 && currentTime < seekable.start(0) - this.liveRangeSafeTimeDelta) {
30774 return true;
30775 }
30776
30777 return false;
30778 };
30779
  _proto.videoUnderflow_ = function videoUnderflow_(_ref) {
    var videoBuffered = _ref.videoBuffered,
        audioBuffered = _ref.audioBuffered,
        currentTime = _ref.currentTime;

    // audio only content will not have video underflow :)
    // (note: returns undefined here rather than false — callers only use it
    // as a truthy check)
    if (!videoBuffered) {
      return;
    }

    var gap; // find a gap in demuxed content.

    if (videoBuffered.length && audioBuffered.length) {
      // in Chrome audio will continue to play for ~3s when we run out of video
      // so we have to check that the video buffer did have some buffer in the
      // past.
      var lastVideoRange = findRange(videoBuffered, currentTime - 3);
      var videoRange = findRange(videoBuffered, currentTime);
      var audioRange = findRange(audioBuffered, currentTime);

      // underflow = audio is buffered here, video is not, but video WAS
      // buffered ~3s ago (audio overplayed the video gap)
      if (audioRange.length && !videoRange.length && lastVideoRange.length) {
        gap = {
          start: lastVideoRange.end(0),
          end: audioRange.end(0)
        };
      } // find a gap in muxed content.

    } else {
      var nextRange = findNextRange(videoBuffered, currentTime); // Even if there is no available next range, there is still a possibility we are
      // stuck in a gap due to video underflow.

      if (!nextRange.length) {
        gap = this.gapFromVideoUnderflow_(videoBuffered, currentTime);
      }
    }

    if (gap) {
      this.logger_("Encountered a gap in video from " + gap.start + " to " + gap.end + ". " + ("Seeking to current time " + currentTime));
      return true;
    }

    return false;
  }
30823 /**
30824 * Timer callback. If playback still has not proceeded, then we seek
30825 * to the start of the next buffered region.
30826 *
30827 * @private
30828 */
30829 ;
30830
30831 _proto.skipTheGap_ = function skipTheGap_(scheduledCurrentTime) {
30832 var buffered = this.tech_.buffered();
30833 var currentTime = this.tech_.currentTime();
30834 var nextRange = findNextRange(buffered, currentTime);
30835 this.cancelTimer_();
30836
30837 if (nextRange.length === 0 || currentTime !== scheduledCurrentTime) {
30838 return;
30839 }
30840
30841 this.logger_('skipTheGap_:', 'currentTime:', currentTime, 'scheduled currentTime:', scheduledCurrentTime, 'nextRange start:', nextRange.start(0)); // only seek if we still have not played
30842
30843 this.tech_.setCurrentTime(nextRange.start(0) + TIME_FUDGE_FACTOR);
30844 this.tech_.trigger({
30845 type: 'usage',
30846 name: 'vhs-gap-skip'
30847 });
30848 this.tech_.trigger({
30849 type: 'usage',
30850 name: 'hls-gap-skip'
30851 });
30852 };
30853
30854 _proto.gapFromVideoUnderflow_ = function gapFromVideoUnderflow_(buffered, currentTime) {
30855 // At least in Chrome, if there is a gap in the video buffer, the audio will continue
30856 // playing for ~3 seconds after the video gap starts. This is done to account for
30857 // video buffer underflow/underrun (note that this is not done when there is audio
30858 // buffer underflow/underrun -- in that case the video will stop as soon as it
30859 // encounters the gap, as audio stalls are more noticeable/jarring to a user than
30860 // video stalls). The player's time will reflect the playthrough of audio, so the
30861 // time will appear as if we are in a buffered region, even if we are stuck in a
30862 // "gap."
30863 //
30864 // Example:
30865 // video buffer: 0 => 10.1, 10.2 => 20
30866 // audio buffer: 0 => 20
30867 // overall buffer: 0 => 10.1, 10.2 => 20
30868 // current time: 13
30869 //
30870 // Chrome's video froze at 10 seconds, where the video buffer encountered the gap,
30871 // however, the audio continued playing until it reached ~3 seconds past the gap
30872 // (13 seconds), at which point it stops as well. Since current time is past the
30873 // gap, findNextRange will return no ranges.
30874 //
30875 // To check for this issue, we see if there is a gap that starts somewhere within
30876 // a 3 second range (3 seconds +/- 1 second) back from our current time.
30877 var gaps = findGaps(buffered);
30878
30879 for (var i = 0; i < gaps.length; i++) {
30880 var start = gaps.start(i);
30881 var end = gaps.end(i); // gap is starts no more than 4 seconds back
30882
30883 if (currentTime - start < 4 && currentTime - start > 2) {
30884 return {
30885 start: start,
30886 end: end
30887 };
30888 }
30889 }
30890
30891 return null;
30892 };
30893
30894 return PlaybackWatcher;
30895 }();
30896
  // Default options for the reload-source-on-error plugin (see initPlugin).
  var defaultOptions = {
    // minimum number of seconds that must elapse between source reloads
    errorInterval: 30,

    // Default source getter: hand the tech's current source (falling back to
    // the player's current source) to the `next` callback. `this` is the
    // player (called via getSource.call(player, ...) in errorHandler).
    getSource: function getSource(next) {
      var tech = this.tech({
        IWillNotUseThisInPlugins: true
      });
      var sourceObj = tech.currentSource_ || this.currentSource();
      return next(sourceObj);
    }
  };
30907 /**
30908 * Main entry point for the plugin
30909 *
30910 * @param {Player} player a reference to a videojs Player instance
30911 * @param {Object} [options] an object with plugin options
30912 * @private
30913 */
30914
  var initPlugin = function initPlugin(player, options) {
    // timestamp (ms) of the last reload attempt; used to rate-limit reloads
    var lastCalled = 0;
    // playback position to restore once the reloaded source is ready
    var seekTo = 0;
    var localOptions = videojs__default["default"].mergeOptions(defaultOptions, options);
    player.ready(function () {
      // usage events may be useful for monitoring QoS
      player.trigger({
        type: 'usage',
        name: 'vhs-error-reload-initialized'
      });
      player.trigger({
        type: 'usage',
        name: 'hls-error-reload-initialized'
      });
    });
    /**
     * Player modifications to perform that must wait until `loadedmetadata`
     * has been triggered
     *
     * @private
     */

    var loadedMetadataHandler = function loadedMetadataHandler() {
      if (seekTo) {
        player.currentTime(seekTo);
      }
    };
    /**
     * Set the source on the player element, play, and seek if necessary
     *
     * @param {Object} sourceObj An object specifying the source url and mime-type to play
     * @private
     */


    var setSource = function setSource(sourceObj) {
      if (sourceObj === null || sourceObj === undefined) {
        return;
      }

      // remember where playback was, except for live (Infinity duration)
      // streams, which restart at the live point (seekTo stays 0)
      seekTo = player.duration() !== Infinity && player.currentTime() || 0;
      player.one('loadedmetadata', loadedMetadataHandler);
      player.src(sourceObj);
      player.trigger({
        type: 'usage',
        name: 'vhs-error-reload'
      });
      player.trigger({
        type: 'usage',
        name: 'hls-error-reload'
      });
      player.play();
    };
    /**
     * Attempt to get a source from either the built-in getSource function
     * or a custom function provided via the options
     *
     * @private
     */


    var errorHandler = function errorHandler() {
      // Do not attempt to reload the source if a source-reload occurred before
      // 'errorInterval' time has elapsed since the last source-reload
      if (Date.now() - lastCalled < localOptions.errorInterval * 1000) {
        player.trigger({
          type: 'usage',
          name: 'vhs-error-reload-canceled'
        });
        player.trigger({
          type: 'usage',
          name: 'hls-error-reload-canceled'
        });
        return;
      }

      if (!localOptions.getSource || typeof localOptions.getSource !== 'function') {
        videojs__default["default"].log.error('ERROR: reloadSourceOnError - The option getSource must be a function!');
        return;
      }

      lastCalled = Date.now();
      return localOptions.getSource.call(player, setSource);
    };
    /**
     * Unbind any event handlers that were bound by the plugin
     *
     * @private
     */


    var cleanupEvents = function cleanupEvents() {
      player.off('loadedmetadata', loadedMetadataHandler);
      player.off('error', errorHandler);
      player.off('dispose', cleanupEvents);
    };
    /**
     * Cleanup before re-initializing the plugin
     *
     * @param {Object} [newOptions] an object with plugin options
     * @private
     */


    var reinitPlugin = function reinitPlugin(newOptions) {
      cleanupEvents();
      initPlugin(player, newOptions);
    };

    player.on('error', errorHandler);
    player.on('dispose', cleanupEvents);

    // Overwrite the plugin function so that we can correctly cleanup before
    // initializing the plugin
    player.reloadSourceOnError = reinitPlugin;
  };
31029 /**
31030 * Reload the source when an error is detected as long as there
31031 * wasn't an error previously within the last 30 seconds
31032 *
31033 * @param {Object} [options] an object with plugin options
31034 */
31035
31036
  var reloadSourceOnError = function reloadSourceOnError(options) {
    // `this` is the player; videojs invokes plugins with the player as context
    initPlugin(this, options);
  };
31040
  // Version of @videojs/http-streaming itself (matches the banner comment at
  // the top of this bundle).
  var version$4 = "2.14.2";

  // Versions of bundled dependencies — presumably mux.js, m3u8-parser,
  // mpd-parser and aes-decrypter; TODO confirm against package.json.
  var version$3 = "6.0.1";

  var version$2 = "0.21.1";

  var version$1 = "4.7.1";

  var version = "3.1.3";

  // Public VHS namespace: playlist machinery, rendition-selection policies,
  // and the shared xhr factory.
  var Vhs = {
    PlaylistLoader: PlaylistLoader,
    Playlist: Playlist,
    utils: utils,
    STANDARD_PLAYLIST_SELECTOR: lastBandwidthSelector,
    INITIAL_PLAYLIST_SELECTOR: lowestBitrateCompatibleVariantSelector,
    lastBandwidthSelector: lastBandwidthSelector,
    movingAverageBandwidthSelector: movingAverageBandwidthSelector,
    comparePlaylistBandwidth: comparePlaylistBandwidth,
    comparePlaylistResolution: comparePlaylistResolution,
    xhr: xhrFactory()
  }; // Define getter/setters for config properties
31063
  // Mirror every Config property onto the Vhs namespace as a warned
  // getter/setter pair so tweaking Vhs.<prop> reads/writes Config.
  Object.keys(Config).forEach(function (prop) {
    Object.defineProperty(Vhs, prop, {
      get: function get() {
        videojs__default["default"].log.warn("using Vhs." + prop + " is UNSAFE be sure you know what you are doing");
        return Config[prop];
      },
      set: function set(value) {
        videojs__default["default"].log.warn("using Vhs." + prop + " is UNSAFE be sure you know what you are doing");

        // only non-negative numbers are accepted; anything else is ignored
        if (typeof value !== 'number' || value < 0) {
          videojs__default["default"].log.warn("value of Vhs." + prop + " must be greater than or equal to 0");
          return;
        }

        Config[prop] = value;
      }
    });
  });

  // localStorage key used by getVhsLocalStorage/updateVhsLocalStorage below
  var LOCAL_STORAGE_KEY = 'videojs-vhs';
31083 /**
31084 * Updates the selectedIndex of the QualityLevelList when a mediachange happens in vhs.
31085 *
31086 * @param {QualityLevelList} qualityLevels The QualityLevelList to update.
31087 * @param {PlaylistLoader} playlistLoader PlaylistLoader containing the new media info.
31088 * @function handleVhsMediaChange
31089 */
31090
31091 var handleVhsMediaChange = function handleVhsMediaChange(qualityLevels, playlistLoader) {
31092 var newPlaylist = playlistLoader.media();
31093 var selectedIndex = -1;
31094
31095 for (var i = 0; i < qualityLevels.length; i++) {
31096 if (qualityLevels[i].id === newPlaylist.id) {
31097 selectedIndex = i;
31098 break;
31099 }
31100 }
31101
31102 qualityLevels.selectedIndex_ = selectedIndex;
31103 qualityLevels.trigger({
31104 selectedIndex: selectedIndex,
31105 type: 'change'
31106 });
31107 };
31108 /**
31109 * Adds quality levels to list once playlist metadata is available
31110 *
31111 * @param {QualityLevelList} qualityLevels The QualityLevelList to attach events to.
31112 * @param {Object} vhs Vhs object to listen to for media events.
31113 * @function handleVhsLoadedMetadata
31114 */
31115
31116
31117 var handleVhsLoadedMetadata = function handleVhsLoadedMetadata(qualityLevels, vhs) {
31118 vhs.representations().forEach(function (rep) {
31119 qualityLevels.addQualityLevel(rep);
31120 });
31121 handleVhsMediaChange(qualityLevels, vhs.playlists);
31122 }; // HLS is a source handler, not a tech. Make sure attempts to use it
31123 // as one do not cause exceptions.
31124
31125
  // Always "unsupported": log a deprecation warning (and return its result).
  Vhs.canPlaySource = function () {
    return videojs__default["default"].log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
  };
31129
  /**
   * Builds the keySystems configuration for videojs-contrib-eme by augmenting
   * the source-provided options with content types (derived from the selected
   * playlists' CODECS attributes), the video playlist's PSSH, and license URLs.
   *
   * @param {Object} keySystemOptions keySystems from the source (may be falsy)
   * @param {Object} mainPlaylist the selected video playlist
   * @param {Object} [audioPlaylist] the selected alternate audio playlist
   * @return {Object} merged keySystems options (or the falsy input as-is)
   */
  var emeKeySystems = function emeKeySystems(keySystemOptions, mainPlaylist, audioPlaylist) {
    if (!keySystemOptions) {
      return keySystemOptions;
    }

    var codecs = {};

    if (mainPlaylist && mainPlaylist.attributes && mainPlaylist.attributes.CODECS) {
      codecs = unwrapCodecList(parseCodecs(mainPlaylist.attributes.CODECS));
    }

    // a demuxed audio playlist's CODECS wins over the main playlist's audio codec
    if (audioPlaylist && audioPlaylist.attributes && audioPlaylist.attributes.CODECS) {
      codecs.audio = audioPlaylist.attributes.CODECS;
    }

    var videoContentType = getMimeForCodec(codecs.video);
    var audioContentType = getMimeForCodec(codecs.audio); // upsert the content types based on the selected playlist

    var keySystemContentTypes = {};

    for (var keySystem in keySystemOptions) {
      keySystemContentTypes[keySystem] = {};

      if (audioContentType) {
        keySystemContentTypes[keySystem].audioContentType = audioContentType;
      }

      if (videoContentType) {
        keySystemContentTypes[keySystem].videoContentType = videoContentType;
      } // Default to using the video playlist's PSSH even though they may be different, as
      // videojs-contrib-eme will only accept one in the options.
      //
      // This shouldn't be an issue for most cases as early initialization will handle all
      // unique PSSH values, and if they aren't, then encrypted events should have the
      // specific information needed for the unique license.


      if (mainPlaylist.contentProtection && mainPlaylist.contentProtection[keySystem] && mainPlaylist.contentProtection[keySystem].pssh) {
        keySystemContentTypes[keySystem].pssh = mainPlaylist.contentProtection[keySystem].pssh;
      } // videojs-contrib-eme accepts the option of specifying: 'com.some.cdm': 'url'
      // so we need to prevent overwriting the URL entirely


      if (typeof keySystemOptions[keySystem] === 'string') {
        keySystemContentTypes[keySystem].url = keySystemOptions[keySystem];
      }
    }

    return videojs__default["default"].mergeOptions(keySystemOptions, keySystemContentTypes);
  };
31180 /**
31181 * @typedef {Object} KeySystems
31182 *
31183 * keySystems configuration for https://github.com/videojs/videojs-contrib-eme
31184 * Note: not all options are listed here.
31185 *
31186 * @property {Uint8Array} [pssh]
31187 * Protection System Specific Header
31188 */
31189
31190 /**
31191 * Goes through all the playlists and collects an array of KeySystems options objects
31192 * containing each playlist's keySystems and their pssh values, if available.
31193 *
31194 * @param {Object[]} playlists
31195 * The playlists to look through
31196 * @param {string[]} keySystems
31197 * The keySystems to collect pssh values for
31198 *
31199 * @return {KeySystems[]}
31200 * An array of KeySystems objects containing available key systems and their
31201 * pssh values
31202 */
31203
31204
31205 var getAllPsshKeySystemsOptions = function getAllPsshKeySystemsOptions(playlists, keySystems) {
31206 return playlists.reduce(function (keySystemsArr, playlist) {
31207 if (!playlist.contentProtection) {
31208 return keySystemsArr;
31209 }
31210
31211 var keySystemsOptions = keySystems.reduce(function (keySystemsObj, keySystem) {
31212 var keySystemOptions = playlist.contentProtection[keySystem];
31213
31214 if (keySystemOptions && keySystemOptions.pssh) {
31215 keySystemsObj[keySystem] = {
31216 pssh: keySystemOptions.pssh
31217 };
31218 }
31219
31220 return keySystemsObj;
31221 }, {});
31222
31223 if (Object.keys(keySystemsOptions).length) {
31224 keySystemsArr.push(keySystemsOptions);
31225 }
31226
31227 return keySystemsArr;
31228 }, []);
31229 };
31230 /**
31231 * Returns a promise that waits for the
31232 * [eme plugin](https://github.com/videojs/videojs-contrib-eme) to create a key session.
31233 *
31234 * Works around https://bugs.chromium.org/p/chromium/issues/detail?id=895449 in non-IE11
31235 * browsers.
31236 *
31237 * As per the above ticket, this is particularly important for Chrome, where, if
31238 * unencrypted content is appended before encrypted content and the key session has not
31239 * been created, a MEDIA_ERR_DECODE will be thrown once the encrypted content is reached
31240 * during playback.
31241 *
31242 * @param {Object} player
31243 * The player instance
31244 * @param {Object[]} sourceKeySystems
31245 * The key systems options from the player source
31246 * @param {Object} [audioMedia]
31247 * The active audio media playlist (optional)
31248 * @param {Object[]} mainPlaylists
31249 * The playlists found on the master playlist object
31250 *
31251 * @return {Object}
31252 * Promise that resolves when the key session has been created
31253 */
31254
31255
  var waitForKeySessionCreation = function waitForKeySessionCreation(_ref) {
    var player = _ref.player,
        sourceKeySystems = _ref.sourceKeySystems,
        audioMedia = _ref.audioMedia,
        mainPlaylists = _ref.mainPlaylists;

    // Without initializeMediaKeys (e.g. the ms-prefixed IE11 path) there is
    // nothing to wait on.
    if (!player.eme.initializeMediaKeys) {
      return Promise.resolve();
    } // TODO should all audio PSSH values be initialized for DRM?
    //
    // All unique video rendition pssh values are initialized for DRM, but here only
    // the initial audio playlist license is initialized. In theory, an encrypted
    // event should be fired if the user switches to an alternative audio playlist
    // where a license is required, but this case hasn't yet been tested. In addition, there
    // may be many alternate audio playlists unlikely to be used (e.g., multiple different
    // languages).


    var playlists = audioMedia ? mainPlaylists.concat([audioMedia]) : mainPlaylists;
    var keySystemsOptionsArr = getAllPsshKeySystemsOptions(playlists, Object.keys(sourceKeySystems));
    var initializationFinishedPromises = [];
    var keySessionCreatedPromises = []; // Since PSSH values are interpreted as initData, EME will dedupe any duplicates. The
    // only place where it should not be deduped is for ms-prefixed APIs, but the early
    // return for IE11 above, and the existence of modern EME APIs in addition to
    // ms-prefixed APIs on Edge should prevent this from being a concern.
    // initializeMediaKeys also won't use the webkit-prefixed APIs.

    keySystemsOptionsArr.forEach(function (keySystemsOptions) {
      // resolves on the tech's first 'keysessioncreated' after this point
      keySessionCreatedPromises.push(new Promise(function (resolve, reject) {
        player.tech_.one('keysessioncreated', resolve);
      }));
      // resolves (or rejects) when eme finishes initializing this key system
      initializationFinishedPromises.push(new Promise(function (resolve, reject) {
        player.eme.initializeMediaKeys({
          keySystems: keySystemsOptions
        }, function (err) {
          if (err) {
            reject(err);
            return;
          }

          resolve();
        });
      }));
    }); // The reasons Promise.race is chosen over Promise.any:
    //
    // * Promise.any is only available in Safari 14+.
    // * None of these promises are expected to reject. If they do reject, it might be
    //   better here for the race to surface the rejection, rather than mask it by using
    //   Promise.any.

    return Promise.race([
      // If a session was previously created, these will all finish resolving without
      // creating a new session, otherwise it will take until the end of all license
      // requests, which is why the key session check is used (to make setup much faster).
      Promise.all(initializationFinishedPromises),
      // Once a single session is created, the browser knows DRM will be used.
      Promise.race(keySessionCreatedPromises)
    ]);
  };
31312 /**
31313 * If the [eme](https://github.com/videojs/videojs-contrib-eme) plugin is available, and
31314 * there are keySystems on the source, sets up source options to prepare the source for
31315 * eme.
31316 *
31317 * @param {Object} player
31318 * The player instance
31319 * @param {Object[]} sourceKeySystems
31320 * The key systems options from the player source
31321 * @param {Object} media
31322 * The active media playlist
31323 * @param {Object} [audioMedia]
31324 * The active audio media playlist (optional)
31325 *
31326 * @return {boolean}
31327 * Whether or not options were configured and EME is available
31328 */
31329
31330 var setupEmeOptions = function setupEmeOptions(_ref2) {
31331 var player = _ref2.player,
31332 sourceKeySystems = _ref2.sourceKeySystems,
31333 media = _ref2.media,
31334 audioMedia = _ref2.audioMedia;
31335 var sourceOptions = emeKeySystems(sourceKeySystems, media, audioMedia);
31336
31337 if (!sourceOptions) {
31338 return false;
31339 }
31340
31341 player.currentSource().keySystems = sourceOptions; // eme handles the rest of the setup, so if it is missing
31342 // do nothing.
31343
31344 if (sourceOptions && !player.eme) {
31345 videojs__default["default"].log.warn('DRM encrypted source cannot be decrypted without a DRM plugin');
31346 return false;
31347 }
31348
31349 return true;
31350 };
31351
31352 var getVhsLocalStorage = function getVhsLocalStorage() {
31353 if (!window.localStorage) {
31354 return null;
31355 }
31356
31357 var storedObject = window.localStorage.getItem(LOCAL_STORAGE_KEY);
31358
31359 if (!storedObject) {
31360 return null;
31361 }
31362
31363 try {
31364 return JSON.parse(storedObject);
31365 } catch (e) {
31366 // someone may have tampered with the value
31367 return null;
31368 }
31369 };
31370
31371 var updateVhsLocalStorage = function updateVhsLocalStorage(options) {
31372 if (!window.localStorage) {
31373 return false;
31374 }
31375
31376 var objectToStore = getVhsLocalStorage();
31377 objectToStore = objectToStore ? videojs__default["default"].mergeOptions(objectToStore, options) : options;
31378
31379 try {
31380 window.localStorage.setItem(LOCAL_STORAGE_KEY, JSON.stringify(objectToStore));
31381 } catch (e) {
31382 // Throws if storage is full (e.g., always on iOS 5+ Safari private mode, where
31383 // storage is set to 0).
31384 // https://developer.mozilla.org/en-US/docs/Web/API/Storage/setItem#Exceptions
31385 // No need to perform any operation.
31386 return false;
31387 }
31388
31389 return objectToStore;
31390 };
31391 /**
31392 * Parses VHS-supported media types from data URIs. See
31393 * https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs
31394 * for information on data URIs.
31395 *
31396 * @param {string} dataUri
31397 * The data URI
31398 *
31399 * @return {string|Object}
31400 * The parsed object/string, or the original string if no supported media type
31401 * was found
31402 */
31403
31404
31405 var expandDataUri = function expandDataUri(dataUri) {
31406 if (dataUri.toLowerCase().indexOf('data:application/vnd.videojs.vhs+json,') === 0) {
31407 return JSON.parse(dataUri.substring(dataUri.indexOf(',') + 1));
31408 } // no known case for this data URI, return the string as-is
31409
31410
31411 return dataUri;
31412 };
31413 /**
31414 * Whether the browser has built-in HLS support.
31415 */
31416
31417
  // IIFE: computed once at load time — true when the browser's <video> element
  // reports it can play any of the common HLS manifest mime-types.
  Vhs.supportsNativeHls = function () {
    if (!document || !document.createElement) {
      return false;
    }

    var video = document.createElement('video'); // native HLS is definitely not supported if HTML5 video isn't

    if (!videojs__default["default"].getTech('Html5').isSupported()) {
      return false;
    } // HLS manifests can go by many mime-types


    var canPlay = [
      // Apple sanctioned
      'application/vnd.apple.mpegurl',
      // Apple sanctioned for backwards compatibility
      'audio/mpegurl',
      // Very common
      'audio/x-mpegurl',
      // Very common
      'application/x-mpegurl',
      // Included for completeness
      'video/x-mpegurl', 'video/mpegurl', 'application/mpegurl'
    ];
    return canPlay.some(function (canItPlay) {
      return /maybe|probably/i.test(video.canPlayType(canItPlay));
    });
  }();
31440
31441 Vhs.supportsNativeDash = function () {
31442 if (!document || !document.createElement || !videojs__default["default"].getTech('Html5').isSupported()) {
31443 return false;
31444 }
31445
31446 return /maybe|probably/i.test(document.createElement('video').canPlayType('application/dash+xml'));
31447 }();
31448
31449 Vhs.supportsTypeNatively = function (type) {
31450 if (type === 'hls') {
31451 return Vhs.supportsNativeHls;
31452 }
31453
31454 if (type === 'dash') {
31455 return Vhs.supportsNativeDash;
31456 }
31457
31458 return false;
31459 };
31460 /**
31461 * HLS is a source handler, not a tech. Make sure attempts to use it
31462 * as one do not cause exceptions.
31463 */
31464
31465
  Vhs.isSupported = function () {
    // Deliberately warns instead of reporting support: VHS is a source
    // handler, so listing it in techOrder is a misconfiguration.
    return videojs__default["default"].log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
  };
31469
  // video.js Component base class; VhsHandler extends it below for event handling.
  var Component = videojs__default["default"].getComponent('Component');
31471 /**
31472 * The Vhs Handler object, where we orchestrate all of the parts
31473 * of HLS to interact with video.js
31474 *
31475 * @class VhsHandler
31476 * @extends videojs.Component
   * @param {Object} source the source object
31478 * @param {Tech} tech the parent tech object
31479 * @param {Object} options optional and required options
31480 */
31481
  var VhsHandler = /*#__PURE__*/function (_Component) {
    inheritsLoose(VhsHandler, _Component);

    function VhsHandler(source, tech, options) {
      var _this;

      // merged `hls`/`vhs` option objects become the Component options
      _this = _Component.call(this, tech, videojs__default["default"].mergeOptions(options.hls, options.vhs)) || this;

      if (options.hls && Object.keys(options.hls).length) {
        videojs__default["default"].log.warn('Using hls options is deprecated. Please rename `hls` to `vhs` in your options object.');
      } // if a tech level `initialBandwidth` option was passed
      // use that over the VHS level `bandwidth` option


      if (typeof options.initialBandwidth === 'number') {
        _this.options_.bandwidth = options.initialBandwidth;
      }

      _this.logger_ = logger('VhsHandler'); // tech.player() is deprecated but setup a reference to HLS for
      // backwards-compatibility

      if (tech.options_ && tech.options_.playerId) {
        var _player = videojs__default["default"](tech.options_.playerId);

        // Deprecated accessors: player.hls / player.vhs / player.dash all
        // resolve to this handler; hls/vhs additionally emit usage events.
        if (!_player.hasOwnProperty('hls')) {
          Object.defineProperty(_player, 'hls', {
            get: function get() {
              videojs__default["default"].log.warn('player.hls is deprecated. Use player.tech().vhs instead.');
              tech.trigger({
                type: 'usage',
                name: 'hls-player-access'
              });
              return assertThisInitialized(_this);
            },
            configurable: true
          });
        }

        if (!_player.hasOwnProperty('vhs')) {
          Object.defineProperty(_player, 'vhs', {
            get: function get() {
              videojs__default["default"].log.warn('player.vhs is deprecated. Use player.tech().vhs instead.');
              tech.trigger({
                type: 'usage',
                name: 'vhs-player-access'
              });
              return assertThisInitialized(_this);
            },
            configurable: true
          });
        }

        if (!_player.hasOwnProperty('dash')) {
          Object.defineProperty(_player, 'dash', {
            get: function get() {
              videojs__default["default"].log.warn('player.dash is deprecated. Use player.tech().vhs instead.');
              return assertThisInitialized(_this);
            },
            configurable: true
          });
        }

        _this.player_ = _player;
      }

      _this.tech_ = tech;
      _this.source_ = source;
      _this.stats = {};
      // set to true by the 'firstplay' handler wired up in src() so the
      // seek-to-live seek does not feed back into setCurrentTime
      _this.ignoreNextSeekingEvent_ = false;

      _this.setOptions_();

      if (_this.options_.overrideNative && tech.overrideNativeAudioTracks && tech.overrideNativeVideoTracks) {
        tech.overrideNativeAudioTracks(true);
        tech.overrideNativeVideoTracks(true);
      } else if (_this.options_.overrideNative && (tech.featuresNativeVideoTracks || tech.featuresNativeAudioTracks)) {
        // overriding native HLS only works if audio tracks have been emulated
        // error early if we're misconfigured
        throw new Error('Overriding native HLS requires emulated tracks. ' + 'See https://git.io/vMpjB');
      } // listen for fullscreenchange events for this player so that we
      // can adjust our quality selection quickly


      _this.on(document, ['fullscreenchange', 'webkitfullscreenchange', 'mozfullscreenchange', 'MSFullscreenChange'], function (event) {
        var fullscreenElement = document.fullscreenElement || document.webkitFullscreenElement || document.mozFullScreenElement || document.msFullscreenElement;

        if (fullscreenElement && fullscreenElement.contains(_this.tech_.el())) {
          _this.masterPlaylistController_.fastQualityChange_();
        } else {
          // When leaving fullscreen, since the in page pixel dimensions should be smaller
          // than full screen, see if there should be a rendition switch down to preserve
          // bandwidth.
          _this.masterPlaylistController_.checkABR_();
        }
      });

      // NOTE: these tech handlers run with `this` bound to the handler (not
      // the tech) via Component's event binding — presumably; confirm against
      // the Component.on implementation.
      _this.on(_this.tech_, 'seeking', function () {
        if (this.ignoreNextSeekingEvent_) {
          this.ignoreNextSeekingEvent_ = false;
          return;
        }

        this.setCurrentTime(this.tech_.currentTime());
      });

      _this.on(_this.tech_, 'error', function () {
        // verify that the error was real and we are loaded
        // enough to have mpc loaded.
        if (this.tech_.error() && this.masterPlaylistController_) {
          this.masterPlaylistController_.pauseLoading();
        }
      });

      _this.on(_this.tech_, 'play', _this.play);

      return _this;
    }

    var _proto = VhsHandler.prototype;
31601
    // Normalize this.options_: apply defaults, optionally restore bandwidth /
    // throughput from local storage, and let source-level options win last.
    _proto.setOptions_ = function setOptions_() {
      var _this2 = this;

      // defaults
      this.options_.withCredentials = this.options_.withCredentials || false;
      // `=== false ? false : true` keeps these enabled by default while still
      // honoring an explicit `false`
      this.options_.handleManifestRedirects = this.options_.handleManifestRedirects === false ? false : true;
      this.options_.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions === false ? false : true;
      this.options_.useDevicePixelRatio = this.options_.useDevicePixelRatio || false;
      this.options_.smoothQualityChange = this.options_.smoothQualityChange || false;
      this.options_.useBandwidthFromLocalStorage = typeof this.source_.useBandwidthFromLocalStorage !== 'undefined' ? this.source_.useBandwidthFromLocalStorage : this.options_.useBandwidthFromLocalStorage || false;
      this.options_.useNetworkInformationApi = this.options_.useNetworkInformationApi || false;
      this.options_.useDtsForTimestampOffset = this.options_.useDtsForTimestampOffset || false;
      this.options_.customTagParsers = this.options_.customTagParsers || [];
      this.options_.customTagMappers = this.options_.customTagMappers || [];
      this.options_.cacheEncryptionKeys = this.options_.cacheEncryptionKeys || false;

      if (typeof this.options_.blacklistDuration !== 'number') {
        // default blacklist duration: five minutes, in seconds
        this.options_.blacklistDuration = 5 * 60;
      }

      if (typeof this.options_.bandwidth !== 'number') {
        if (this.options_.useBandwidthFromLocalStorage) {
          var storedObject = getVhsLocalStorage();

          if (storedObject && storedObject.bandwidth) {
            this.options_.bandwidth = storedObject.bandwidth;
            this.tech_.trigger({
              type: 'usage',
              name: 'vhs-bandwidth-from-local-storage'
            });
            this.tech_.trigger({
              type: 'usage',
              name: 'hls-bandwidth-from-local-storage'
            });
          }

          if (storedObject && storedObject.throughput) {
            this.options_.throughput = storedObject.throughput;
            this.tech_.trigger({
              type: 'usage',
              name: 'vhs-throughput-from-local-storage'
            });
            this.tech_.trigger({
              type: 'usage',
              name: 'hls-throughput-from-local-storage'
            });
          }
        }
      } // if bandwidth was not set by options or pulled from local storage, start playlist
      // selection at a reasonable bandwidth


      if (typeof this.options_.bandwidth !== 'number') {
        this.options_.bandwidth = Config.INITIAL_BANDWIDTH;
      } // If the bandwidth number is unchanged from the initial setting
      // then this takes precedence over the enableLowInitialPlaylist option


      this.options_.enableLowInitialPlaylist = this.options_.enableLowInitialPlaylist && this.options_.bandwidth === Config.INITIAL_BANDWIDTH; // grab options passed to player.src

      // options set on the source object override everything applied above
      ['withCredentials', 'useDevicePixelRatio', 'limitRenditionByPlayerDimensions', 'bandwidth', 'smoothQualityChange', 'customTagParsers', 'customTagMappers', 'handleManifestRedirects', 'cacheEncryptionKeys', 'playlistSelector', 'initialPlaylistSelector', 'experimentalBufferBasedABR', 'liveRangeSafeTimeDelta', 'experimentalLLHLS', 'useNetworkInformationApi', 'useDtsForTimestampOffset', 'experimentalExactManifestTimings', 'experimentalLeastPixelDiffSelector'].forEach(function (option) {
        if (typeof _this2.source_[option] !== 'undefined') {
          _this2.options_[option] = _this2.source_[option];
        }
      });
      this.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions;
      this.useDevicePixelRatio = this.options_.useDevicePixelRatio;
    }
31670 /**
31671 * called when player.src gets called, handle a new source
31672 *
31673 * @param {Object} src the source object to handle
31674 */
31675 ;
31676
    // Handle a new source: build the MasterPlaylistController and
    // PlaybackWatcher, install the backwards-compatible property proxies and
    // read-only stats, wire up events, and point the tech at the MediaSource.
    _proto.src = function src(_src, type) {
      var _this3 = this;

      // do nothing if the src is falsey
      if (!_src) {
        return;
      }

      this.setOptions_(); // add master playlist controller options

      this.options_.src = expandDataUri(this.source_.src);
      this.options_.tech = this.tech_;
      this.options_.externVhs = Vhs;
      this.options_.sourceType = simpleTypeFromSourceType(type); // Whenever we seek internally, we should update the tech

      this.options_.seekTo = function (time) {
        _this3.tech_.setCurrentTime(time);
      };

      if (this.options_.smoothQualityChange) {
        videojs__default["default"].log.warn('smoothQualityChange is deprecated and will be removed in the next major version');
      }

      this.masterPlaylistController_ = new MasterPlaylistController(this.options_);
      var playbackWatcherOptions = videojs__default["default"].mergeOptions({
        liveRangeSafeTimeDelta: SAFE_TIME_DELTA
      }, this.options_, {
        seekable: function seekable() {
          return _this3.seekable();
        },
        media: function media() {
          return _this3.masterPlaylistController_.media();
        },
        masterPlaylistController: this.masterPlaylistController_
      });
      this.playbackWatcher_ = new PlaybackWatcher(playbackWatcherOptions);
      this.masterPlaylistController_.on('error', function () {
        var player = videojs__default["default"].players[_this3.tech_.options_.playerId];
        var error = _this3.masterPlaylistController_.error;

        // normalize string errors and object errors lacking a code to code 3
        if (typeof error === 'object' && !error.code) {
          error.code = 3;
        } else if (typeof error === 'string') {
          error = {
            message: error,
            code: 3
          };
        }

        player.error(error);
      });
      var defaultSelector = this.options_.experimentalBufferBasedABR ? Vhs.movingAverageBandwidthSelector(0.55) : Vhs.STANDARD_PLAYLIST_SELECTOR; // `this` in selectPlaylist should be the VhsHandler for backwards
      // compatibility with < v2

      this.masterPlaylistController_.selectPlaylist = this.selectPlaylist ? this.selectPlaylist.bind(this) : defaultSelector.bind(this);
      this.masterPlaylistController_.selectInitialPlaylist = Vhs.INITIAL_PLAYLIST_SELECTOR.bind(this); // re-expose some internal objects for backwards compatibility with < v2

      this.playlists = this.masterPlaylistController_.masterPlaylistLoader_;
      this.mediaSource = this.masterPlaylistController_.mediaSource; // Proxy assignment of some properties to the master playlist
      // controller. Using a custom property for backwards compatibility
      // with < v2

      Object.defineProperties(this, {
        selectPlaylist: {
          get: function get() {
            return this.masterPlaylistController_.selectPlaylist;
          },
          set: function set(selectPlaylist) {
            this.masterPlaylistController_.selectPlaylist = selectPlaylist.bind(this);
          }
        },
        throughput: {
          get: function get() {
            return this.masterPlaylistController_.mainSegmentLoader_.throughput.rate;
          },
          set: function set(throughput) {
            this.masterPlaylistController_.mainSegmentLoader_.throughput.rate = throughput; // By setting `count` to 1 the throughput value becomes the starting value
            // for the cumulative average

            this.masterPlaylistController_.mainSegmentLoader_.throughput.count = 1;
          }
        },
        bandwidth: {
          get: function get() {
            var playerBandwidthEst = this.masterPlaylistController_.mainSegmentLoader_.bandwidth;
            var networkInformation = window.navigator.connection || window.navigator.mozConnection || window.navigator.webkitConnection;
            var tenMbpsAsBitsPerSecond = 10e6;

            if (this.options_.useNetworkInformationApi && networkInformation) {
              // downlink returns Mbps
              // https://developer.mozilla.org/en-US/docs/Web/API/NetworkInformation/downlink
              var networkInfoBandwidthEstBitsPerSec = networkInformation.downlink * 1000 * 1000; // downlink maxes out at 10 Mbps. In the event that both networkInformationApi and the player
              // estimate a bandwidth greater than 10 Mbps, use the larger of the two estimates to ensure that
              // high quality streams are not filtered out.

              if (networkInfoBandwidthEstBitsPerSec >= tenMbpsAsBitsPerSecond && playerBandwidthEst >= tenMbpsAsBitsPerSecond) {
                playerBandwidthEst = Math.max(playerBandwidthEst, networkInfoBandwidthEstBitsPerSec);
              } else {
                playerBandwidthEst = networkInfoBandwidthEstBitsPerSec;
              }
            }

            return playerBandwidthEst;
          },
          set: function set(bandwidth) {
            this.masterPlaylistController_.mainSegmentLoader_.bandwidth = bandwidth; // setting the bandwidth manually resets the throughput counter
            // `count` is set to zero that current value of `rate` isn't included
            // in the cumulative average

            this.masterPlaylistController_.mainSegmentLoader_.throughput = {
              rate: 0,
              count: 0
            };
          }
        },

        /**
         * `systemBandwidth` is a combination of two serial processes bit-rates. The first
         * is the network bitrate provided by `bandwidth` and the second is the bitrate of
         * the entire process after that - decryption, transmuxing, and appending - provided
         * by `throughput`.
         *
         * Since the two process are serial, the overall system bandwidth is given by:
         * sysBandwidth = 1 / (1 / bandwidth + 1 / throughput)
         */
        systemBandwidth: {
          get: function get() {
            var invBandwidth = 1 / (this.bandwidth || 1);
            var invThroughput;

            if (this.throughput > 0) {
              invThroughput = 1 / this.throughput;
            } else {
              invThroughput = 0;
            }

            var systemBitrate = Math.floor(1 / (invBandwidth + invThroughput));
            return systemBitrate;
          },
          set: function set() {
            videojs__default["default"].log.error('The "systemBandwidth" property is read-only');
          }
        }
      });

      if (this.options_.bandwidth) {
        this.bandwidth = this.options_.bandwidth;
      }

      if (this.options_.throughput) {
        this.throughput = this.options_.throughput;
      }

      // enumerable, read-only live snapshot of playback statistics
      Object.defineProperties(this.stats, {
        bandwidth: {
          get: function get() {
            return _this3.bandwidth || 0;
          },
          enumerable: true
        },
        mediaRequests: {
          get: function get() {
            return _this3.masterPlaylistController_.mediaRequests_() || 0;
          },
          enumerable: true
        },
        mediaRequestsAborted: {
          get: function get() {
            return _this3.masterPlaylistController_.mediaRequestsAborted_() || 0;
          },
          enumerable: true
        },
        mediaRequestsTimedout: {
          get: function get() {
            return _this3.masterPlaylistController_.mediaRequestsTimedout_() || 0;
          },
          enumerable: true
        },
        mediaRequestsErrored: {
          get: function get() {
            return _this3.masterPlaylistController_.mediaRequestsErrored_() || 0;
          },
          enumerable: true
        },
        mediaTransferDuration: {
          get: function get() {
            return _this3.masterPlaylistController_.mediaTransferDuration_() || 0;
          },
          enumerable: true
        },
        mediaBytesTransferred: {
          get: function get() {
            return _this3.masterPlaylistController_.mediaBytesTransferred_() || 0;
          },
          enumerable: true
        },
        mediaSecondsLoaded: {
          get: function get() {
            return _this3.masterPlaylistController_.mediaSecondsLoaded_() || 0;
          },
          enumerable: true
        },
        mediaAppends: {
          get: function get() {
            return _this3.masterPlaylistController_.mediaAppends_() || 0;
          },
          enumerable: true
        },
        mainAppendsToLoadedData: {
          get: function get() {
            return _this3.masterPlaylistController_.mainAppendsToLoadedData_() || 0;
          },
          enumerable: true
        },
        audioAppendsToLoadedData: {
          get: function get() {
            return _this3.masterPlaylistController_.audioAppendsToLoadedData_() || 0;
          },
          enumerable: true
        },
        appendsToLoadedData: {
          get: function get() {
            return _this3.masterPlaylistController_.appendsToLoadedData_() || 0;
          },
          enumerable: true
        },
        timeToLoadedData: {
          get: function get() {
            return _this3.masterPlaylistController_.timeToLoadedData_() || 0;
          },
          enumerable: true
        },
        buffered: {
          get: function get() {
            return timeRangesToArray(_this3.tech_.buffered());
          },
          enumerable: true
        },
        currentTime: {
          get: function get() {
            return _this3.tech_.currentTime();
          },
          enumerable: true
        },
        currentSource: {
          get: function get() {
            return _this3.tech_.currentSource_;
          },
          enumerable: true
        },
        currentTech: {
          get: function get() {
            return _this3.tech_.name_;
          },
          enumerable: true
        },
        duration: {
          get: function get() {
            return _this3.tech_.duration();
          },
          enumerable: true
        },
        master: {
          get: function get() {
            return _this3.playlists.master;
          },
          enumerable: true
        },
        playerDimensions: {
          get: function get() {
            return _this3.tech_.currentDimensions();
          },
          enumerable: true
        },
        seekable: {
          get: function get() {
            return timeRangesToArray(_this3.tech_.seekable());
          },
          enumerable: true
        },
        timestamp: {
          get: function get() {
            return Date.now();
          },
          enumerable: true
        },
        videoPlaybackQuality: {
          get: function get() {
            return _this3.tech_.getVideoPlaybackQuality();
          },
          enumerable: true
        }
      });
      this.tech_.one('canplay', this.masterPlaylistController_.setupFirstPlay.bind(this.masterPlaylistController_));
      this.tech_.on('bandwidthupdate', function () {
        if (_this3.options_.useBandwidthFromLocalStorage) {
          updateVhsLocalStorage({
            bandwidth: _this3.bandwidth,
            throughput: Math.round(_this3.throughput)
          });
        }
      });
      this.masterPlaylistController_.on('selectedinitialmedia', function () {
        // Add the manual rendition mix-in to VhsHandler
        renditionSelectionMixin(_this3);
      });
      this.masterPlaylistController_.sourceUpdater_.on('createdsourcebuffers', function () {
        _this3.setupEme_();
      }); // the bandwidth of the primary segment loader is our best
      // estimate of overall bandwidth

      this.on(this.masterPlaylistController_, 'progress', function () {
        this.tech_.trigger('progress');
      }); // In the live case, we need to ignore the very first `seeking` event since
      // that will be the result of the seek-to-live behavior

      this.on(this.masterPlaylistController_, 'firstplay', function () {
        this.ignoreNextSeekingEvent_ = true;
      });
      this.setupQualityLevels_(); // do nothing if the tech has been disposed already
      // this can occur if someone sets the src in player.ready(), for instance

      if (!this.tech_.el()) {
        return;
      }

      // hand the MSE-backed object URL to the tech; revoked again in dispose()
      this.mediaSourceUrl_ = window.URL.createObjectURL(this.masterPlaylistController_.mediaSource);
      this.tech_.src(this.mediaSourceUrl_);
    };
32006
    // Kick off EME key session creation for the main (and, when present,
    // alternate audio) playlists; signals the source updater on success and
    // raises a fatal (code 3) player error on failure.
    _proto.createKeySessions_ = function createKeySessions_() {
      var _this4 = this;

      var audioPlaylistLoader = this.masterPlaylistController_.mediaTypes_.AUDIO.activePlaylistLoader;
      this.logger_('waiting for EME key session creation');
      waitForKeySessionCreation({
        player: this.player_,
        sourceKeySystems: this.source_.keySystems,
        audioMedia: audioPlaylistLoader && audioPlaylistLoader.media(),
        mainPlaylists: this.playlists.master.playlists
      }).then(function () {
        _this4.logger_('created EME key session');

        _this4.masterPlaylistController_.sourceUpdater_.initializedEme();
      }).catch(function (err) {
        _this4.logger_('error while creating EME key session', err);

        _this4.player_.error({
          message: 'Failed to initialize media keys for EME',
          code: 3
        });
      });
    };
32030
    // Bound to the tech's 'waitingforkey' event in setupEme_() and removed in
    // dispose().
    _proto.handleWaitingForKey_ = function handleWaitingForKey_() {
      // If waitingforkey is fired, it's possible that the data that's necessary to retrieve
      // the key is in the manifest. While this should've happened on initial source load, it
      // may happen again in live streams where the keys change, and the manifest info
      // reflects the update.
      //
      // Because videojs-contrib-eme compares the PSSH data we send to that of PSSH data it's
      // already requested keys for, we don't have to worry about this generating extraneous
      // requests.
      this.logger_('waitingforkey fired, attempting to create any new key sessions');
      this.createKeySessions_();
    }
32043 /**
32044 * If necessary and EME is available, sets up EME options and waits for key session
32045 * creation.
32046 *
     * This function also updates the source updater so that it can be used, as for some
32048 * browsers, EME must be configured before content is appended (if appending unencrypted
32049 * content before encrypted content).
32050 */
32051 ;
32052
    _proto.setupEme_ = function setupEme_() {
      var _this5 = this;

      var audioPlaylistLoader = this.masterPlaylistController_.mediaTypes_.AUDIO.activePlaylistLoader;
      var didSetupEmeOptions = setupEmeOptions({
        player: this.player_,
        sourceKeySystems: this.source_.keySystems,
        media: this.playlists.media(),
        audioMedia: audioPlaylistLoader && audioPlaylistLoader.media()
      });
      // an 'output-restricted' key status means the current rendition cannot
      // be displayed, so it is blacklisted permanently
      this.player_.tech_.on('keystatuschange', function (e) {
        if (e.status === 'output-restricted') {
          _this5.masterPlaylistController_.blacklistCurrentPlaylist({
            playlist: _this5.masterPlaylistController_.media(),
            message: "DRM keystatus changed to " + e.status + ". Playlist will fail to play. Check for HDCP content.",
            blacklistDuration: Infinity
          });
        }
      });
      // bind once so the same reference can be removed in dispose()
      this.handleWaitingForKey_ = this.handleWaitingForKey_.bind(this);
      this.player_.tech_.on('waitingforkey', this.handleWaitingForKey_); // In IE11 this is too early to initialize media keys, and IE11 does not support
      // promises.

      if (videojs__default["default"].browser.IE_VERSION === 11 || !didSetupEmeOptions) {
        // If EME options were not set up, we've done all we could to initialize EME.
        this.masterPlaylistController_.sourceUpdater_.initializedEme();
        return;
      }

      this.createKeySessions_();
    }
32084 /**
32085 * Initializes the quality levels and sets listeners to update them.
32086 *
32087 * @method setupQualityLevels_
32088 * @private
32089 */
32090 ;
32091
    // Idempotent: the guard below makes repeated calls (e.g. on every src())
    // a no-op once qualityLevels_ exists.
    _proto.setupQualityLevels_ = function setupQualityLevels_() {
      var _this6 = this;

      var player = videojs__default["default"].players[this.tech_.options_.playerId]; // if there isn't a player or there isn't a qualityLevels plugin
      // or qualityLevels_ listeners have already been setup, do nothing.

      if (!player || !player.qualityLevels || this.qualityLevels_) {
        return;
      }

      this.qualityLevels_ = player.qualityLevels();
      this.masterPlaylistController_.on('selectedinitialmedia', function () {
        handleVhsLoadedMetadata(_this6.qualityLevels_, _this6);
      });
      this.playlists.on('mediachange', function () {
        handleVhsMediaChange(_this6.qualityLevels_, _this6.playlists);
      });
    }
32110 /**
32111 * return the version
32112 */
32113 ;
32114
32115 VhsHandler.version = function version$5() {
32116 return {
32117 '@videojs/http-streaming': version$4,
32118 'mux.js': version$3,
32119 'mpd-parser': version$2,
32120 'm3u8-parser': version$1,
32121 'aes-decrypter': version
32122 };
32123 }
32124 /**
32125 * return the version
32126 */
32127 ;
32128
32129 _proto.version = function version() {
32130 return this.constructor.version();
32131 };
32132
    // Delegates to SourceUpdater.canChangeType() (static capability check).
    _proto.canChangeType = function canChangeType() {
      return SourceUpdater.canChangeType();
    }
32136 /**
32137 * Begin playing the video.
32138 */
32139 ;
32140
32141 _proto.play = function play() {
32142 this.masterPlaylistController_.play();
32143 }
32144 /**
32145 * a wrapper around the function in MasterPlaylistController
32146 */
32147 ;
32148
32149 _proto.setCurrentTime = function setCurrentTime(currentTime) {
32150 this.masterPlaylistController_.setCurrentTime(currentTime);
32151 }
32152 /**
32153 * a wrapper around the function in MasterPlaylistController
32154 */
32155 ;
32156
32157 _proto.duration = function duration() {
32158 return this.masterPlaylistController_.duration();
32159 }
32160 /**
32161 * a wrapper around the function in MasterPlaylistController
32162 */
32163 ;
32164
32165 _proto.seekable = function seekable() {
32166 return this.masterPlaylistController_.seekable();
32167 }
32168 /**
32169 * Abort all outstanding work and cleanup.
32170 */
32171 ;
32172
    // Abort all outstanding work: dispose owned helpers, remove the
    // deprecated back-references installed by the constructor, revoke the
    // MediaSource object URL, and detach the waitingforkey listener.
    _proto.dispose = function dispose() {
      if (this.playbackWatcher_) {
        this.playbackWatcher_.dispose();
      }

      if (this.masterPlaylistController_) {
        this.masterPlaylistController_.dispose();
      }

      if (this.qualityLevels_) {
        this.qualityLevels_.dispose();
      }

      if (this.player_) {
        delete this.player_.vhs;
        delete this.player_.dash;
        delete this.player_.hls;
      }

      if (this.tech_ && this.tech_.vhs) {
        delete this.tech_.vhs;
      } // don't check this.tech_.hls as it will log a deprecated warning


      if (this.tech_) {
        delete this.tech_.hls;
      }

      if (this.mediaSourceUrl_ && window.URL.revokeObjectURL) {
        window.URL.revokeObjectURL(this.mediaSourceUrl_);
        this.mediaSourceUrl_ = null;
      }

      if (this.tech_) {
        this.tech_.off('waitingforkey', this.handleWaitingForKey_);
      }

      _Component.prototype.dispose.call(this);
    };
32212
    // Resolve `time` against the active playlist's program time; the result
    // is delivered asynchronously through `callback`.
    _proto.convertToProgramTime = function convertToProgramTime(time, callback) {
      return getProgramTime({
        playlist: this.masterPlaylistController_.media(),
        time: time,
        callback: callback
      });
    }
32219 } // the player must be playing before calling this
32220 ;
32221
    // Seek to a given program time on the active playlist; the outcome is
    // delivered through `callback`.
    _proto.seekToProgramTime = function seekToProgramTime$1(programTime, callback, pauseAfterSeek, retryCount) {
      // default: pause the player after the seek completes
      if (pauseAfterSeek === void 0) {
        pauseAfterSeek = true;
      }

      // default: allow up to two retries
      if (retryCount === void 0) {
        retryCount = 2;
      }

      return seekToProgramTime({
        programTime: programTime,
        playlist: this.masterPlaylistController_.media(),
        retryCount: retryCount,
        pauseAfterSeek: pauseAfterSeek,
        seekTo: this.options_.seekTo,
        tech: this.options_.tech,
        callback: callback
      });
    };

    return VhsHandler;
  }(Component);
32244 /**
32245 * The Source Handler object, which informs video.js what additional
32246 * MIME types are supported and sets up playback. It is registered
32247 * automatically to the appropriate tech based on the capabilities of
32248 * the browser it is running in. It is not necessary to use or modify
32249 * this object in normal usage.
32250 */
32251
32252
  var VhsSourceHandler = {
    name: 'videojs-http-streaming',
    VERSION: version$4,
    // Report whether this handler can play the given source object.
    canHandleSource: function canHandleSource(srcObj, options) {
      if (options === void 0) {
        options = {};
      }

      var localOptions = videojs__default["default"].mergeOptions(videojs__default["default"].options, options);
      return VhsSourceHandler.canPlayType(srcObj.type, localOptions);
    },
    // Create a VhsHandler for the source, attach it to the tech, and start loading.
    handleSource: function handleSource(source, tech, options) {
      if (options === void 0) {
        options = {};
      }

      var localOptions = videojs__default["default"].mergeOptions(videojs__default["default"].options, options);
      tech.vhs = new VhsHandler(source, tech, localOptions);

      // NOTE(review): the guard checks `videojs.hasOwnProperty('hls')` but the
      // property is defined on `tech` — confirm whether the check was meant to
      // be `tech.hasOwnProperty('hls')`.
      if (!videojs__default["default"].hasOwnProperty('hls')) {
        Object.defineProperty(tech, 'hls', {
          get: function get() {
            videojs__default["default"].log.warn('player.tech().hls is deprecated. Use player.tech().vhs instead.');
            return tech.vhs;
          },
          configurable: true
        });
      }

      tech.vhs.xhr = xhrFactory();
      tech.vhs.src(source.src, source.type);
      return tech.vhs;
    },
    // Return 'maybe' when VHS should handle the type via MSE, '' otherwise.
    canPlayType: function canPlayType(type, options) {
      if (options === void 0) {
        options = {};
      }

      // transpiled destructuring of { vhs: { overrideNative }, hls: { overrideNative } }
      var _videojs$mergeOptions = videojs__default["default"].mergeOptions(videojs__default["default"].options, options),
          _videojs$mergeOptions2 = _videojs$mergeOptions.vhs;

      _videojs$mergeOptions2 = _videojs$mergeOptions2 === void 0 ? {} : _videojs$mergeOptions2;
      var _videojs$mergeOptions3 = _videojs$mergeOptions2.overrideNative,
          overrideNative = _videojs$mergeOptions3 === void 0 ? !videojs__default["default"].browser.IS_ANY_SAFARI : _videojs$mergeOptions3,
          _videojs$mergeOptions4 = _videojs$mergeOptions.hls;
      _videojs$mergeOptions4 = _videojs$mergeOptions4 === void 0 ? {} : _videojs$mergeOptions4;
      var _videojs$mergeOptions5 = _videojs$mergeOptions4.overrideNative,
          legacyOverrideNative = _videojs$mergeOptions5 === void 0 ? false : _videojs$mergeOptions5;
      var supportedType = simpleTypeFromSourceType(type);
      // MSE playback is used when the type is recognized and either native
      // support is absent or an overrideNative flag (vhs or legacy hls) is set
      var canUseMsePlayback = supportedType && (!Vhs.supportsTypeNatively(supportedType) || legacyOverrideNative || overrideNative);
      return canUseMsePlayback ? 'maybe' : '';
    }
  };
32306 /**
32307 * Check to see if the native MediaSource object exists and supports
32308 * an MP4 container with both H.264 video and AAC-LC audio.
32309 *
32310 * @return {boolean} if native media sources are supported
32311 */
32312
32313 var supportsNativeMediaSources = function supportsNativeMediaSources() {
32314 return browserSupportsCodec('avc1.4d400d,mp4a.40.2');
32315 }; // register source handlers with the appropriate techs
32316
32317
  // register the source handler on the HTML5 tech only when MSE supports the
  // baseline codec pair
  if (supportsNativeMediaSources()) {
    videojs__default["default"].getTech('Html5').registerSourceHandler(VhsSourceHandler, 0);
  }

  videojs__default["default"].VhsHandler = VhsHandler;
  // deprecated Hls* aliases are kept as warning getters
  Object.defineProperty(videojs__default["default"], 'HlsHandler', {
    get: function get() {
      videojs__default["default"].log.warn('videojs.HlsHandler is deprecated. Use videojs.VhsHandler instead.');
      return VhsHandler;
    },
    configurable: true
  });
  videojs__default["default"].VhsSourceHandler = VhsSourceHandler;
  Object.defineProperty(videojs__default["default"], 'HlsSourceHandler', {
    get: function get() {
      videojs__default["default"].log.warn('videojs.HlsSourceHandler is deprecated. ' + 'Use videojs.VhsSourceHandler instead.');
      return VhsSourceHandler;
    },
    configurable: true
  });
  videojs__default["default"].Vhs = Vhs;
  Object.defineProperty(videojs__default["default"], 'Hls', {
    get: function get() {
      videojs__default["default"].log.warn('videojs.Hls is deprecated. Use videojs.Vhs instead.');
      return Vhs;
    },
    configurable: true
  });

  // fallback registration — presumably targets video.js builds without
  // `videojs.use` (middleware support); confirm against supported versions
  if (!videojs__default["default"].use) {
    videojs__default["default"].registerComponent('Hls', Vhs);
    videojs__default["default"].registerComponent('Vhs', Vhs);
  }

  // ensure the global option buckets exist so lookups above never throw
  videojs__default["default"].options.vhs = videojs__default["default"].options.vhs || {};
  videojs__default["default"].options.hls = videojs__default["default"].options.hls || {};

  // register the reloadSourceOnError plugin unless one is already present
  if (!videojs__default["default"].getPlugin || !videojs__default["default"].getPlugin('reloadSourceOnError')) {
    var registerPlugin = videojs__default["default"].registerPlugin || videojs__default["default"].plugin;
    registerPlugin('reloadSourceOnError', reloadSourceOnError);
  }

  // public module exports
  exports.LOCAL_STORAGE_KEY = LOCAL_STORAGE_KEY;
  exports.Vhs = Vhs;
  exports.VhsHandler = VhsHandler;
  exports.VhsSourceHandler = VhsSourceHandler;
  exports.emeKeySystems = emeKeySystems;
  exports.expandDataUri = expandDataUri;
  exports.getAllPsshKeySystemsOptions = getAllPsshKeySystemsOptions;
  exports.setupEmeOptions = setupEmeOptions;
  exports.simpleTypeFromSourceType = simpleTypeFromSourceType;
  exports.waitForKeySessionCreation = waitForKeySessionCreation;

  Object.defineProperty(exports, '__esModule', { value: true });
32372
32373}));