1/*! @name @videojs/http-streaming @version 2.16.2 @license Apache-2.0 */
2(function (global, factory) {
3 typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('video.js'), require('@xmldom/xmldom')) :
4 typeof define === 'function' && define.amd ? define(['exports', 'video.js', '@xmldom/xmldom'], factory) :
5 (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.httpStreaming = {}, global.videojs, global.window));
6})(this, (function (exports, videojs, xmldom) { 'use strict';
7
8 function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
9
10 var videojs__default = /*#__PURE__*/_interopDefaultLegacy(videojs);
11
12 function createCommonjsModule(fn, basedir, module) {
13 return module = {
14 path: basedir,
15 exports: {},
16 require: function (path, base) {
17 return commonjsRequire(path, (base === undefined || base === null) ? module.path : base);
18 }
19 }, fn(module, module.exports), module.exports;
20 }
21
22 function commonjsRequire () {
23 throw new Error('Dynamic requires are not currently supported by @rollup/plugin-commonjs');
24 }
25
26 var assertThisInitialized = createCommonjsModule(function (module) {
27 function _assertThisInitialized(self) {
28 if (self === void 0) {
29 throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
30 }
31
32 return self;
33 }
34
35 module.exports = _assertThisInitialized;
36 module.exports["default"] = module.exports, module.exports.__esModule = true;
37 });
38
39 var setPrototypeOf = createCommonjsModule(function (module) {
40 function _setPrototypeOf(o, p) {
41 module.exports = _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) {
42 o.__proto__ = p;
43 return o;
44 };
45
46 module.exports["default"] = module.exports, module.exports.__esModule = true;
47 return _setPrototypeOf(o, p);
48 }
49
50 module.exports = _setPrototypeOf;
51 module.exports["default"] = module.exports, module.exports.__esModule = true;
52 });
53
54 var inheritsLoose = createCommonjsModule(function (module) {
55 function _inheritsLoose(subClass, superClass) {
56 subClass.prototype = Object.create(superClass.prototype);
57 subClass.prototype.constructor = subClass;
58 setPrototypeOf(subClass, superClass);
59 }
60
61 module.exports = _inheritsLoose;
62 module.exports["default"] = module.exports, module.exports.__esModule = true;
63 });
64
65 var urlToolkit = createCommonjsModule(function (module, exports) {
66 // see https://tools.ietf.org/html/rfc1808
67 (function (root) {
68 var URL_REGEX = /^((?:[a-zA-Z0-9+\-.]+:)?)(\/\/[^\/?#]*)?((?:[^\/?#]*\/)*[^;?#]*)?(;[^?#]*)?(\?[^#]*)?(#[^]*)?$/;
69 var FIRST_SEGMENT_REGEX = /^([^\/?#]*)([^]*)$/;
70 var SLASH_DOT_REGEX = /(?:\/|^)\.(?=\/)/g;
71 var SLASH_DOT_DOT_REGEX = /(?:\/|^)\.\.\/(?!\.\.\/)[^\/]*(?=\/)/g;
72 var URLToolkit = {
73 // If opts.alwaysNormalize is true then the path will always be normalized even when it starts with / or //
74 // E.g
75 // With opts.alwaysNormalize = false (default, spec compliant)
76 // http://a.com/b/cd + /e/f/../g => http://a.com/e/f/../g
77 // With opts.alwaysNormalize = true (not spec compliant)
78 // http://a.com/b/cd + /e/f/../g => http://a.com/e/g
79 buildAbsoluteURL: function buildAbsoluteURL(baseURL, relativeURL, opts) {
80 opts = opts || {}; // remove any remaining space and CRLF
81
82 baseURL = baseURL.trim();
83 relativeURL = relativeURL.trim();
84
85 if (!relativeURL) {
86 // 2a) If the embedded URL is entirely empty, it inherits the
87 // entire base URL (i.e., is set equal to the base URL)
88 // and we are done.
89 if (!opts.alwaysNormalize) {
90 return baseURL;
91 }
92
93 var basePartsForNormalise = URLToolkit.parseURL(baseURL);
94
95 if (!basePartsForNormalise) {
96 throw new Error('Error trying to parse base URL.');
97 }
98
99 basePartsForNormalise.path = URLToolkit.normalizePath(basePartsForNormalise.path);
100 return URLToolkit.buildURLFromParts(basePartsForNormalise);
101 }
102
103 var relativeParts = URLToolkit.parseURL(relativeURL);
104
105 if (!relativeParts) {
106 throw new Error('Error trying to parse relative URL.');
107 }
108
109 if (relativeParts.scheme) {
110 // 2b) If the embedded URL starts with a scheme name, it is
111 // interpreted as an absolute URL and we are done.
112 if (!opts.alwaysNormalize) {
113 return relativeURL;
114 }
115
116 relativeParts.path = URLToolkit.normalizePath(relativeParts.path);
117 return URLToolkit.buildURLFromParts(relativeParts);
118 }
119
120 var baseParts = URLToolkit.parseURL(baseURL);
121
122 if (!baseParts) {
123 throw new Error('Error trying to parse base URL.');
124 }
125
126 if (!baseParts.netLoc && baseParts.path && baseParts.path[0] !== '/') {
127 // If netLoc is missing and the path doesn't start with '/', assume everything before the first '/' is the netLoc
128 // This causes 'example.com/a' to be handled as '//example.com/a' instead of '/example.com/a'
129 var pathParts = FIRST_SEGMENT_REGEX.exec(baseParts.path);
130 baseParts.netLoc = pathParts[1];
131 baseParts.path = pathParts[2];
132 }
133
134 if (baseParts.netLoc && !baseParts.path) {
135 baseParts.path = '/';
136 }
137
138 var builtParts = {
139 // 2c) Otherwise, the embedded URL inherits the scheme of
140 // the base URL.
141 scheme: baseParts.scheme,
142 netLoc: relativeParts.netLoc,
143 path: null,
144 params: relativeParts.params,
145 query: relativeParts.query,
146 fragment: relativeParts.fragment
147 };
148
149 if (!relativeParts.netLoc) {
150 // 3) If the embedded URL's <net_loc> is non-empty, we skip to
151 // Step 7. Otherwise, the embedded URL inherits the <net_loc>
152 // (if any) of the base URL.
153 builtParts.netLoc = baseParts.netLoc; // 4) If the embedded URL path is preceded by a slash "/", the
154 // path is not relative and we skip to Step 7.
155
156 if (relativeParts.path[0] !== '/') {
157 if (!relativeParts.path) {
158 // 5) If the embedded URL path is empty (and not preceded by a
159 // slash), then the embedded URL inherits the base URL path
160 builtParts.path = baseParts.path; // 5a) if the embedded URL's <params> is non-empty, we skip to
161 // step 7; otherwise, it inherits the <params> of the base
162 // URL (if any) and
163
164 if (!relativeParts.params) {
165 builtParts.params = baseParts.params; // 5b) if the embedded URL's <query> is non-empty, we skip to
166 // step 7; otherwise, it inherits the <query> of the base
167 // URL (if any) and we skip to step 7.
168
169 if (!relativeParts.query) {
170 builtParts.query = baseParts.query;
171 }
172 }
173 } else {
174 // 6) The last segment of the base URL's path (anything
175 // following the rightmost slash "/", or the entire path if no
176 // slash is present) is removed and the embedded URL's path is
177 // appended in its place.
178 var baseURLPath = baseParts.path;
179 var newPath = baseURLPath.substring(0, baseURLPath.lastIndexOf('/') + 1) + relativeParts.path;
180 builtParts.path = URLToolkit.normalizePath(newPath);
181 }
182 }
183 }
184
185 if (builtParts.path === null) {
186 builtParts.path = opts.alwaysNormalize ? URLToolkit.normalizePath(relativeParts.path) : relativeParts.path;
187 }
188
189 return URLToolkit.buildURLFromParts(builtParts);
190 },
191 parseURL: function parseURL(url) {
192 var parts = URL_REGEX.exec(url);
193
194 if (!parts) {
195 return null;
196 }
197
198 return {
199 scheme: parts[1] || '',
200 netLoc: parts[2] || '',
201 path: parts[3] || '',
202 params: parts[4] || '',
203 query: parts[5] || '',
204 fragment: parts[6] || ''
205 };
206 },
207 normalizePath: function normalizePath(path) {
208 // The following operations are
209 // then applied, in order, to the new path:
210 // 6a) All occurrences of "./", where "." is a complete path
211 // segment, are removed.
212 // 6b) If the path ends with "." as a complete path segment,
213 // that "." is removed.
214 path = path.split('').reverse().join('').replace(SLASH_DOT_REGEX, ''); // 6c) All occurrences of "<segment>/../", where <segment> is a
215 // complete path segment not equal to "..", are removed.
216 // Removal of these path segments is performed iteratively,
217 // removing the leftmost matching pattern on each iteration,
218 // until no matching pattern remains.
219 // 6d) If the path ends with "<segment>/..", where <segment> is a
220 // complete path segment not equal to "..", that
221 // "<segment>/.." is removed.
222
223 while (path.length !== (path = path.replace(SLASH_DOT_DOT_REGEX, '')).length) {}
224
225 return path.split('').reverse().join('');
226 },
227 buildURLFromParts: function buildURLFromParts(parts) {
228 return parts.scheme + parts.netLoc + parts.path + parts.params + parts.query + parts.fragment;
229 }
230 };
231 module.exports = URLToolkit;
232 })();
233 });
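
/**
 * A minimal usage sketch of `urlToolkit.buildAbsoluteURL` (illustrative URLs;
 * the last two calls restate the `alwaysNormalize` examples from the comments
 * above):
 *
 * ```js
 * urlToolkit.buildAbsoluteURL('http://a.com/b/cd', 'e/f/../g');
 * // => 'http://a.com/b/e/g'
 * urlToolkit.buildAbsoluteURL('http://a.com/b/cd', '/e/f/../g');
 * // => 'http://a.com/e/f/../g' (default, spec compliant)
 * urlToolkit.buildAbsoluteURL('http://a.com/b/cd', '/e/f/../g', { alwaysNormalize: true });
 * // => 'http://a.com/e/g'
 * ```
 */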
234
235 var DEFAULT_LOCATION = 'http://example.com';
236
237 var resolveUrl$1 = function resolveUrl(baseUrl, relativeUrl) {
238 // return early if we don't need to resolve
239 if (/^[a-z]+:/i.test(relativeUrl)) {
240 return relativeUrl;
241 } // if baseUrl is a data URI, ignore it and resolve everything relative to window.location
242
243
244 if (/^data:/.test(baseUrl)) {
245 baseUrl = window.location && window.location.href || '';
246 } // IE11 supports URL but not the URL constructor
247 // feature detect the behavior we want
248
249
250 var nativeURL = typeof window.URL === 'function';
251 var protocolLess = /^\/\//.test(baseUrl); // remove location if window.location isn't available (i.e. we're in node)
252 // and if baseUrl isn't an absolute url
253
254 var removeLocation = !window.location && !/\/\//i.test(baseUrl); // if the base URL is relative then combine with the current location
255
256 if (nativeURL) {
257 baseUrl = new window.URL(baseUrl, window.location || DEFAULT_LOCATION);
258 } else if (!/\/\//i.test(baseUrl)) {
259 baseUrl = urlToolkit.buildAbsoluteURL(window.location && window.location.href || '', baseUrl);
260 }
261
262 if (nativeURL) {
263 var newUrl = new URL(relativeUrl, baseUrl); // if we're a protocol-less url, remove the protocol
264 // and if we're location-less, remove the location
265 // otherwise, return the url unmodified
266
267 if (removeLocation) {
268 return newUrl.href.slice(DEFAULT_LOCATION.length);
269 } else if (protocolLess) {
270 return newUrl.href.slice(newUrl.protocol.length);
271 }
272
273 return newUrl.href;
274 }
275
276 return urlToolkit.buildAbsoluteURL(baseUrl, relativeUrl);
277 };
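
/**
 * Usage sketch for `resolveUrl$1` (illustrative URLs; assumes a browser-like
 * environment where `window.URL` is a constructor):
 *
 * ```js
 * resolveUrl$1('https://example.com/hls/master.m3u8', 'media-1/playlist.m3u8');
 * // => 'https://example.com/hls/media-1/playlist.m3u8'
 * resolveUrl$1('https://example.com/hls/master.m3u8', 'https://cdn.example.com/other.m3u8');
 * // => 'https://cdn.example.com/other.m3u8' (already absolute, returned early)
 * ```
 */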
278
279 /**
280 * @file resolve-url.js - Handling how URLs are resolved and manipulated
281 */
282 var resolveUrl = resolveUrl$1;
283 /**
284 * Checks whether the xhr request was redirected and returns the correct url
285 * depending on the `handleManifestRedirects` option
286 *
287 * @api private
288 *
289 * @param {string} url - the url being requested
290 * @param {XMLHttpRequest} req - xhr request result
291 *
292 * @return {string}
293 */
294
295 var resolveManifestRedirect = function resolveManifestRedirect(handleManifestRedirect, url, req) {
296 // To understand how the responseURL below is set and generated:
297 // - https://fetch.spec.whatwg.org/#concept-response-url
298 // - https://fetch.spec.whatwg.org/#atomic-http-redirect-handling
299 if (handleManifestRedirect && req && req.responseURL && url !== req.responseURL) {
300 return req.responseURL;
301 }
302
303 return url;
304 };
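
/**
 * Sketch of the redirect handling above; the `req` object stands in for an
 * XMLHttpRequest result (illustrative URLs):
 *
 * ```js
 * var req = { responseURL: 'https://b.example/master.m3u8' };
 * resolveManifestRedirect(true, 'https://a.example/master.m3u8', req);
 * // => 'https://b.example/master.m3u8' (follow the redirect)
 * resolveManifestRedirect(false, 'https://a.example/master.m3u8', req);
 * // => 'https://a.example/master.m3u8' (keep the requested url)
 * ```
 */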
305
306 var logger = function logger(source) {
307 if (videojs__default["default"].log.debug) {
308 return videojs__default["default"].log.debug.bind(videojs__default["default"], 'VHS:', source + " >");
309 }
310
311 return function () {};
312 };
313
314 var _extends_1 = createCommonjsModule(function (module) {
315 function _extends() {
316 module.exports = _extends = Object.assign || function (target) {
317 for (var i = 1; i < arguments.length; i++) {
318 var source = arguments[i];
319
320 for (var key in source) {
321 if (Object.prototype.hasOwnProperty.call(source, key)) {
322 target[key] = source[key];
323 }
324 }
325 }
326
327 return target;
328 };
329
330 module.exports["default"] = module.exports, module.exports.__esModule = true;
331 return _extends.apply(this, arguments);
332 }
333
334 module.exports = _extends;
335 module.exports["default"] = module.exports, module.exports.__esModule = true;
336 });
337
338 /**
339 * @file stream.js
340 */
341
342 /**
343 * A lightweight readable stream implementation that handles event dispatching.
344 *
345 * @class Stream
346 */
347 var Stream = /*#__PURE__*/function () {
348 function Stream() {
349 this.listeners = {};
350 }
351 /**
352 * Add a listener for a specified event type.
353 *
354 * @param {string} type the event name
355 * @param {Function} listener the callback to be invoked when an event of
356 * the specified type occurs
357 */
358
359
360 var _proto = Stream.prototype;
361
362 _proto.on = function on(type, listener) {
363 if (!this.listeners[type]) {
364 this.listeners[type] = [];
365 }
366
367 this.listeners[type].push(listener);
368 }
369 /**
370 * Remove a listener for a specified event type.
371 *
372 * @param {string} type the event name
373 * @param {Function} listener a function previously registered for this
374 * type of event through `on`
375 * @return {boolean} whether the listener was found and removed
376 */
377 ;
378
379 _proto.off = function off(type, listener) {
380 if (!this.listeners[type]) {
381 return false;
382 }
383
384 var index = this.listeners[type].indexOf(listener); // TODO: which is better?
385 // In Video.js we slice listener functions
386 // on trigger so that it does not mess up the order
387 // while we loop through.
388 //
389 // Here we slice on off so that the loop in trigger
390 // can continue using its old reference to loop without
391 // messing up the order.
392
393 this.listeners[type] = this.listeners[type].slice(0);
394 this.listeners[type].splice(index, 1);
395 return index > -1;
396 }
397 /**
398 * Trigger an event of the specified type on this stream. Any additional
399 * arguments to this function are passed as parameters to event listeners.
400 *
401 * @param {string} type the event name
402 */
403 ;
404
405 _proto.trigger = function trigger(type) {
406 var callbacks = this.listeners[type];
407
408 if (!callbacks) {
409 return;
410 } // Slicing the arguments on every invocation of this method
411 // can add a significant amount of overhead. Avoid the
412 // intermediate object creation for the common case of a
413 // single callback argument
414
415
416 if (arguments.length === 2) {
417 var length = callbacks.length;
418
419 for (var i = 0; i < length; ++i) {
420 callbacks[i].call(this, arguments[1]);
421 }
422 } else {
423 var args = Array.prototype.slice.call(arguments, 1);
424 var _length = callbacks.length;
425
426 for (var _i = 0; _i < _length; ++_i) {
427 callbacks[_i].apply(this, args);
428 }
429 }
430 }
431 /**
432 * Destroys the stream and cleans up.
433 */
434 ;
435
436 _proto.dispose = function dispose() {
437 this.listeners = {};
438 }
439 /**
440 * Forwards all `data` events on this stream to the destination stream. The
441 * destination stream should provide a method `push` to receive the data
442 * events as they arrive.
443 *
444 * @param {Stream} destination the stream that will receive all `data` events
445 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
446 */
447 ;
448
449 _proto.pipe = function pipe(destination) {
450 this.on('data', function (data) {
451 destination.push(data);
452 });
453 };
454
455 return Stream;
456 }();
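
/**
 * A minimal sketch of the Stream event API (illustrative listener and
 * payload):
 *
 * ```js
 * var stream = new Stream();
 * var onData = function (data) { console.log('got', data); };
 * stream.on('data', onData);
 * stream.trigger('data', { byte: 255 }); // logs: got { byte: 255 }
 * stream.off('data', onData);            // => true (listener was registered)
 * ```
 */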
457
458 var atob = function atob(s) {
459 return window.atob ? window.atob(s) : Buffer.from(s, 'base64').toString('binary');
460 };
461
462 function decodeB64ToUint8Array(b64Text) {
463 var decodedString = atob(b64Text);
464 var array = new Uint8Array(decodedString.length);
465
466 for (var i = 0; i < decodedString.length; i++) {
467 array[i] = decodedString.charCodeAt(i);
468 }
469
470 return array;
471 }
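
/**
 * Sketch: decoding a base64 string into raw bytes (illustrative input):
 *
 * ```js
 * decodeB64ToUint8Array('AQID'); // => Uint8Array [ 1, 2, 3 ]
 * ```
 */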
472
473 /*! @name m3u8-parser @version 4.8.0 @license Apache-2.0 */
474 /**
475 * A stream that buffers string input and generates a `data` event for each
476 * line.
477 *
478 * @class LineStream
479 * @extends Stream
480 */
481
482 var LineStream = /*#__PURE__*/function (_Stream) {
483 inheritsLoose(LineStream, _Stream);
484
485 function LineStream() {
486 var _this;
487
488 _this = _Stream.call(this) || this;
489 _this.buffer = '';
490 return _this;
491 }
492 /**
493 * Add new data to be parsed.
494 *
495 * @param {string} data the text to process
496 */
497
498
499 var _proto = LineStream.prototype;
500
501 _proto.push = function push(data) {
502 var nextNewline;
503 this.buffer += data;
504 nextNewline = this.buffer.indexOf('\n');
505
506 for (; nextNewline > -1; nextNewline = this.buffer.indexOf('\n')) {
507 this.trigger('data', this.buffer.substring(0, nextNewline));
508 this.buffer = this.buffer.substring(nextNewline + 1);
509 }
510 };
511
512 return LineStream;
513 }(Stream);
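
/**
 * Usage sketch: a LineStream re-emits buffered input one line at a time;
 * note that a trailing partial line stays buffered until more input (or a
 * final newline) arrives:
 *
 * ```js
 * var lines = new LineStream();
 * lines.on('data', function (line) { console.log(line); });
 * lines.push('#EXTM3U\n#EXT-X-VERSION'); // logs: #EXTM3U
 * lines.push(':3\n');                    // logs: #EXT-X-VERSION:3
 * ```
 */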
514
515 var TAB = String.fromCharCode(0x09);
516
517 var parseByterange = function parseByterange(byterangeString) {
518 // optionally match and capture 0+ digits before `@`
519 // optionally match and capture 0+ digits after `@`
520 var match = /([0-9.]*)?@?([0-9.]*)?/.exec(byterangeString || '');
521 var result = {};
522
523 if (match[1]) {
524 result.length = parseInt(match[1], 10);
525 }
526
527 if (match[2]) {
528 result.offset = parseInt(match[2], 10);
529 }
530
531 return result;
532 };
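
/**
 * Sketch of the "<length>@<offset>" byterange syntax handled above:
 *
 * ```js
 * parseByterange('720@0'); // => { length: 720, offset: 0 }
 * parseByterange('720');   // => { length: 720 }
 * parseByterange('');      // => {}
 * ```
 */
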
533 /**
534 * "forgiving" attribute list psuedo-grammar:
535 * attributes -> keyvalue (',' keyvalue)*
536 * keyvalue -> key '=' value
537 * key -> [^=]*
538 * value -> '"' [^"]* '"' | [^,]*
539 */
540
541
542 var attributeSeparator = function attributeSeparator() {
543 var key = '[^=]*';
544 var value = '"[^"]*"|[^,]*';
545 var keyvalue = '(?:' + key + ')=(?:' + value + ')';
546 return new RegExp('(?:^|,)(' + keyvalue + ')');
547 };
548 /**
549 * Parse attributes from a line given the separator
550 *
551 * @param {string} attributes the attribute line to parse
552 */
553
554
555 var parseAttributes$1 = function parseAttributes(attributes) {
556 // split the string on attribute key/value boundaries
557 var attrs = attributes.split(attributeSeparator());
558 var result = {};
559 var i = attrs.length;
560 var attr;
561
562 while (i--) {
563 // filter out unmatched portions of the string
564 if (attrs[i] === '') {
565 continue;
566 } // split the key and value
567
568
569 attr = /([^=]*)=(.*)/.exec(attrs[i]).slice(1); // trim whitespace and remove optional quotes around the value
570
571 attr[0] = attr[0].replace(/^\s+|\s+$/g, '');
572 attr[1] = attr[1].replace(/^\s+|\s+$/g, '');
573 attr[1] = attr[1].replace(/^['"](.*)['"]$/g, '$1');
574 result[attr[0]] = attr[1];
575 }
576
577 return result;
578 };
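
/**
 * Sketch: attribute-list parsing with the forgiving grammar above. Values
 * stay strings at this level (numeric coercion happens in the tag handlers),
 * and commas inside quoted values are preserved:
 *
 * ```js
 * parseAttributes$1('BANDWIDTH=1280000,CODECS="avc1.4d401f,mp4a.40.2"');
 * // => { BANDWIDTH: '1280000', CODECS: 'avc1.4d401f,mp4a.40.2' }
 * ```
 */
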
579 /**
580 * A line-level M3U8 parser event stream. It expects to receive input one
581 * line at a time and performs a context-free parse of its contents. A stream
582 * interpretation of a manifest can be useful if the manifest is expected to
583 * be too large to fit comfortably into memory or the entirety of the input
584 * is not immediately available. Otherwise, it's probably much easier to work
585 * with a regular `Parser` object.
586 *
587 * Produces `data` events with an object that captures the parser's
588 * interpretation of the input. That object has a property `type` that is one
589 * of `uri`, `comment`, or `tag`. URIs only have a single additional
590 * property, `uri`, which captures the entirety of the input without
591 * interpretation. Comments similarly have a single additional property
592 * `text` which is the input without the leading `#`.
593 *
594 * Tags always have a property `tagType` which is the lower-cased version of
595 * the M3U8 directive without the `#EXT` or `#EXT-X-` prefix. For instance,
596 * `#EXT-X-MEDIA-SEQUENCE` becomes `media-sequence` when parsed. Unrecognized
597 * tags are given the tag type `unknown` and a single additional property
598 * `data` with the remainder of the input.
599 *
600 * @class ParseStream
601 * @extends Stream
602 */
603
604
605 var ParseStream = /*#__PURE__*/function (_Stream) {
606 inheritsLoose(ParseStream, _Stream);
607
608 function ParseStream() {
609 var _this;
610
611 _this = _Stream.call(this) || this;
612 _this.customParsers = [];
613 _this.tagMappers = [];
614 return _this;
615 }
616 /**
617 * Parses an additional line of input.
618 *
619 * @param {string} line a single line of an M3U8 file to parse
620 */
621
622
623 var _proto = ParseStream.prototype;
624
625 _proto.push = function push(line) {
626 var _this2 = this;
627
628 var match;
629 var event; // strip whitespace
630
631 line = line.trim();
632
633 if (line.length === 0) {
634 // ignore empty lines
635 return;
636 } // URIs
637
638
639 if (line[0] !== '#') {
640 this.trigger('data', {
641 type: 'uri',
642 uri: line
643 });
644 return;
645 } // map tags
646
647
648 var newLines = this.tagMappers.reduce(function (acc, mapper) {
649 var mappedLine = mapper(line); // skip if unchanged
650
651 if (mappedLine === line) {
652 return acc;
653 }
654
655 return acc.concat([mappedLine]);
656 }, [line]);
657 newLines.forEach(function (newLine) {
658 for (var i = 0; i < _this2.customParsers.length; i++) {
659 if (_this2.customParsers[i].call(_this2, newLine)) {
660 return;
661 }
662 } // Comments
663
664
665 if (newLine.indexOf('#EXT') !== 0) {
666 _this2.trigger('data', {
667 type: 'comment',
668 text: newLine.slice(1)
669 });
670
671 return;
672 } // strip off any carriage returns here so the regex matching
673 // doesn't have to account for them.
674
675
676 newLine = newLine.replace('\r', ''); // Tags
677
678 match = /^#EXTM3U/.exec(newLine);
679
680 if (match) {
681 _this2.trigger('data', {
682 type: 'tag',
683 tagType: 'm3u'
684 });
685
686 return;
687 }
688
689 match = /^#EXTINF:?([0-9\.]*)?,?(.*)?$/.exec(newLine);
690
691 if (match) {
692 event = {
693 type: 'tag',
694 tagType: 'inf'
695 };
696
697 if (match[1]) {
698 event.duration = parseFloat(match[1]);
699 }
700
701 if (match[2]) {
702 event.title = match[2];
703 }
704
705 _this2.trigger('data', event);
706
707 return;
708 }
709
710 match = /^#EXT-X-TARGETDURATION:?([0-9.]*)?/.exec(newLine);
711
712 if (match) {
713 event = {
714 type: 'tag',
715 tagType: 'targetduration'
716 };
717
718 if (match[1]) {
719 event.duration = parseInt(match[1], 10);
720 }
721
722 _this2.trigger('data', event);
723
724 return;
725 }
726
727 match = /^#EXT-X-VERSION:?([0-9.]*)?/.exec(newLine);
728
729 if (match) {
730 event = {
731 type: 'tag',
732 tagType: 'version'
733 };
734
735 if (match[1]) {
736 event.version = parseInt(match[1], 10);
737 }
738
739 _this2.trigger('data', event);
740
741 return;
742 }
743
744 match = /^#EXT-X-MEDIA-SEQUENCE:?(\-?[0-9.]*)?/.exec(newLine);
745
746 if (match) {
747 event = {
748 type: 'tag',
749 tagType: 'media-sequence'
750 };
751
752 if (match[1]) {
753 event.number = parseInt(match[1], 10);
754 }
755
756 _this2.trigger('data', event);
757
758 return;
759 }
760
761 match = /^#EXT-X-DISCONTINUITY-SEQUENCE:?(\-?[0-9.]*)?/.exec(newLine);
762
763 if (match) {
764 event = {
765 type: 'tag',
766 tagType: 'discontinuity-sequence'
767 };
768
769 if (match[1]) {
770 event.number = parseInt(match[1], 10);
771 }
772
773 _this2.trigger('data', event);
774
775 return;
776 }
777
778 match = /^#EXT-X-PLAYLIST-TYPE:?(.*)?$/.exec(newLine);
779
780 if (match) {
781 event = {
782 type: 'tag',
783 tagType: 'playlist-type'
784 };
785
786 if (match[1]) {
787 event.playlistType = match[1];
788 }
789
790 _this2.trigger('data', event);
791
792 return;
793 }
794
795 match = /^#EXT-X-BYTERANGE:?(.*)?$/.exec(newLine);
796
797 if (match) {
798 event = _extends_1(parseByterange(match[1]), {
799 type: 'tag',
800 tagType: 'byterange'
801 });
802
803 _this2.trigger('data', event);
804
805 return;
806 }
807
808 match = /^#EXT-X-ALLOW-CACHE:?(YES|NO)?/.exec(newLine);
809
810 if (match) {
811 event = {
812 type: 'tag',
813 tagType: 'allow-cache'
814 };
815
816 if (match[1]) {
817 event.allowed = !/NO/.test(match[1]);
818 }
819
820 _this2.trigger('data', event);
821
822 return;
823 }
824
825 match = /^#EXT-X-MAP:?(.*)$/.exec(newLine);
826
827 if (match) {
828 event = {
829 type: 'tag',
830 tagType: 'map'
831 };
832
833 if (match[1]) {
834 var attributes = parseAttributes$1(match[1]);
835
836 if (attributes.URI) {
837 event.uri = attributes.URI;
838 }
839
840 if (attributes.BYTERANGE) {
841 event.byterange = parseByterange(attributes.BYTERANGE);
842 }
843 }
844
845 _this2.trigger('data', event);
846
847 return;
848 }
849
850 match = /^#EXT-X-STREAM-INF:?(.*)$/.exec(newLine);
851
852 if (match) {
853 event = {
854 type: 'tag',
855 tagType: 'stream-inf'
856 };
857
858 if (match[1]) {
859 event.attributes = parseAttributes$1(match[1]);
860
861 if (event.attributes.RESOLUTION) {
862 var split = event.attributes.RESOLUTION.split('x');
863 var resolution = {};
864
865 if (split[0]) {
866 resolution.width = parseInt(split[0], 10);
867 }
868
869 if (split[1]) {
870 resolution.height = parseInt(split[1], 10);
871 }
872
873 event.attributes.RESOLUTION = resolution;
874 }
875
876 if (event.attributes.BANDWIDTH) {
877 event.attributes.BANDWIDTH = parseInt(event.attributes.BANDWIDTH, 10);
878 }
879
880 if (event.attributes['FRAME-RATE']) {
881 event.attributes['FRAME-RATE'] = parseFloat(event.attributes['FRAME-RATE']);
882 }
883
884 if (event.attributes['PROGRAM-ID']) {
885 event.attributes['PROGRAM-ID'] = parseInt(event.attributes['PROGRAM-ID'], 10);
886 }
887 }
888
889 _this2.trigger('data', event);
890
891 return;
892 }
893
894 match = /^#EXT-X-MEDIA:?(.*)$/.exec(newLine);
895
896 if (match) {
897 event = {
898 type: 'tag',
899 tagType: 'media'
900 };
901
902 if (match[1]) {
903 event.attributes = parseAttributes$1(match[1]);
904 }
905
906 _this2.trigger('data', event);
907
908 return;
909 }
910
911 match = /^#EXT-X-ENDLIST/.exec(newLine);
912
913 if (match) {
914 _this2.trigger('data', {
915 type: 'tag',
916 tagType: 'endlist'
917 });
918
919 return;
920 }
921
922 match = /^#EXT-X-DISCONTINUITY/.exec(newLine);
923
924 if (match) {
925 _this2.trigger('data', {
926 type: 'tag',
927 tagType: 'discontinuity'
928 });
929
930 return;
931 }
932
933 match = /^#EXT-X-PROGRAM-DATE-TIME:?(.*)$/.exec(newLine);
934
935 if (match) {
936 event = {
937 type: 'tag',
938 tagType: 'program-date-time'
939 };
940
941 if (match[1]) {
942 event.dateTimeString = match[1];
943 event.dateTimeObject = new Date(match[1]);
944 }
945
946 _this2.trigger('data', event);
947
948 return;
949 }
950
951 match = /^#EXT-X-KEY:?(.*)$/.exec(newLine);
952
953 if (match) {
954 event = {
955 type: 'tag',
956 tagType: 'key'
957 };
958
959 if (match[1]) {
960 event.attributes = parseAttributes$1(match[1]); // parse the IV string into a Uint32Array
961
962 if (event.attributes.IV) {
963 if (event.attributes.IV.substring(0, 2).toLowerCase() === '0x') {
964 event.attributes.IV = event.attributes.IV.substring(2);
965 }
966
967 event.attributes.IV = event.attributes.IV.match(/.{8}/g);
968 event.attributes.IV[0] = parseInt(event.attributes.IV[0], 16);
969 event.attributes.IV[1] = parseInt(event.attributes.IV[1], 16);
970 event.attributes.IV[2] = parseInt(event.attributes.IV[2], 16);
971 event.attributes.IV[3] = parseInt(event.attributes.IV[3], 16);
972 event.attributes.IV = new Uint32Array(event.attributes.IV);
973 }
974 }
975
976 _this2.trigger('data', event);
977
978 return;
979 }
980
981 match = /^#EXT-X-START:?(.*)$/.exec(newLine);
982
983 if (match) {
984 event = {
985 type: 'tag',
986 tagType: 'start'
987 };
988
989 if (match[1]) {
990 event.attributes = parseAttributes$1(match[1]);
991 event.attributes['TIME-OFFSET'] = parseFloat(event.attributes['TIME-OFFSET']);
992 event.attributes.PRECISE = /YES/.test(event.attributes.PRECISE);
993 }
994
995 _this2.trigger('data', event);
996
997 return;
998 }
999
1000 match = /^#EXT-X-CUE-OUT-CONT:?(.*)?$/.exec(newLine);
1001
1002 if (match) {
1003 event = {
1004 type: 'tag',
1005 tagType: 'cue-out-cont'
1006 };
1007
1008 if (match[1]) {
1009 event.data = match[1];
1010 } else {
1011 event.data = '';
1012 }
1013
1014 _this2.trigger('data', event);
1015
1016 return;
1017 }
1018
1019 match = /^#EXT-X-CUE-OUT:?(.*)?$/.exec(newLine);
1020
1021 if (match) {
1022 event = {
1023 type: 'tag',
1024 tagType: 'cue-out'
1025 };
1026
1027 if (match[1]) {
1028 event.data = match[1];
1029 } else {
1030 event.data = '';
1031 }
1032
1033 _this2.trigger('data', event);
1034
1035 return;
1036 }
1037
1038 match = /^#EXT-X-CUE-IN:?(.*)?$/.exec(newLine);
1039
1040 if (match) {
1041 event = {
1042 type: 'tag',
1043 tagType: 'cue-in'
1044 };
1045
1046 if (match[1]) {
1047 event.data = match[1];
1048 } else {
1049 event.data = '';
1050 }
1051
1052 _this2.trigger('data', event);
1053
1054 return;
1055 }
1056
1057 match = /^#EXT-X-SKIP:(.*)$/.exec(newLine);
1058
1059 if (match && match[1]) {
1060 event = {
1061 type: 'tag',
1062 tagType: 'skip'
1063 };
1064 event.attributes = parseAttributes$1(match[1]);
1065
1066 if (event.attributes.hasOwnProperty('SKIPPED-SEGMENTS')) {
1067 event.attributes['SKIPPED-SEGMENTS'] = parseInt(event.attributes['SKIPPED-SEGMENTS'], 10);
1068 }
1069
1070 if (event.attributes.hasOwnProperty('RECENTLY-REMOVED-DATERANGES')) {
1071 event.attributes['RECENTLY-REMOVED-DATERANGES'] = event.attributes['RECENTLY-REMOVED-DATERANGES'].split(TAB);
1072 }
1073
1074 _this2.trigger('data', event);
1075
1076 return;
1077 }
1078
1079 match = /^#EXT-X-PART:(.*)$/.exec(newLine);
1080
1081 if (match && match[1]) {
1082 event = {
1083 type: 'tag',
1084 tagType: 'part'
1085 };
1086 event.attributes = parseAttributes$1(match[1]);
1087 ['DURATION'].forEach(function (key) {
1088 if (event.attributes.hasOwnProperty(key)) {
1089 event.attributes[key] = parseFloat(event.attributes[key]);
1090 }
1091 });
1092 ['INDEPENDENT', 'GAP'].forEach(function (key) {
1093 if (event.attributes.hasOwnProperty(key)) {
1094 event.attributes[key] = /YES/.test(event.attributes[key]);
1095 }
1096 });
1097
1098 if (event.attributes.hasOwnProperty('BYTERANGE')) {
1099 event.attributes.byterange = parseByterange(event.attributes.BYTERANGE);
1100 }
1101
1102 _this2.trigger('data', event);
1103
1104 return;
1105 }
1106
1107 match = /^#EXT-X-SERVER-CONTROL:(.*)$/.exec(newLine);
1108
1109 if (match && match[1]) {
1110 event = {
1111 type: 'tag',
1112 tagType: 'server-control'
1113 };
1114 event.attributes = parseAttributes$1(match[1]);
1115 ['CAN-SKIP-UNTIL', 'PART-HOLD-BACK', 'HOLD-BACK'].forEach(function (key) {
1116 if (event.attributes.hasOwnProperty(key)) {
1117 event.attributes[key] = parseFloat(event.attributes[key]);
1118 }
1119 });
1120 ['CAN-SKIP-DATERANGES', 'CAN-BLOCK-RELOAD'].forEach(function (key) {
1121 if (event.attributes.hasOwnProperty(key)) {
1122 event.attributes[key] = /YES/.test(event.attributes[key]);
1123 }
1124 });
1125
1126 _this2.trigger('data', event);
1127
1128 return;
1129 }
1130
1131 match = /^#EXT-X-PART-INF:(.*)$/.exec(newLine);
1132
1133 if (match && match[1]) {
1134 event = {
1135 type: 'tag',
1136 tagType: 'part-inf'
1137 };
1138 event.attributes = parseAttributes$1(match[1]);
1139 ['PART-TARGET'].forEach(function (key) {
1140 if (event.attributes.hasOwnProperty(key)) {
1141 event.attributes[key] = parseFloat(event.attributes[key]);
1142 }
1143 });
1144
1145 _this2.trigger('data', event);
1146
1147 return;
1148 }
1149
1150 match = /^#EXT-X-PRELOAD-HINT:(.*)$/.exec(newLine);
1151
1152 if (match && match[1]) {
1153 event = {
1154 type: 'tag',
1155 tagType: 'preload-hint'
1156 };
1157 event.attributes = parseAttributes$1(match[1]);
1158 ['BYTERANGE-START', 'BYTERANGE-LENGTH'].forEach(function (key) {
1159 if (event.attributes.hasOwnProperty(key)) {
1160 event.attributes[key] = parseInt(event.attributes[key], 10);
1161 var subkey = key === 'BYTERANGE-LENGTH' ? 'length' : 'offset';
1162 event.attributes.byterange = event.attributes.byterange || {};
1163 event.attributes.byterange[subkey] = event.attributes[key]; // only keep the parsed byterange object.
1164
1165 delete event.attributes[key];
1166 }
1167 });
1168
1169 _this2.trigger('data', event);
1170
1171 return;
1172 }
1173
1174 match = /^#EXT-X-RENDITION-REPORT:(.*)$/.exec(newLine);
1175
1176 if (match && match[1]) {
1177 event = {
1178 type: 'tag',
1179 tagType: 'rendition-report'
1180 };
1181 event.attributes = parseAttributes$1(match[1]);
1182 ['LAST-MSN', 'LAST-PART'].forEach(function (key) {
1183 if (event.attributes.hasOwnProperty(key)) {
1184 event.attributes[key] = parseInt(event.attributes[key], 10);
1185 }
1186 });
1187
1188 _this2.trigger('data', event);
1189
1190 return;
1191 } // unknown tag type
1192
1193
1194 _this2.trigger('data', {
1195 type: 'tag',
1196 data: newLine.slice(4)
1197 });
1198 });
1199 }
1200 /**
1201 * Add a parser for custom tags
1202 *
1203 * @param {Object} options a map of options for the added parser
1204 * @param {RegExp} options.expression a regular expression to match the custom tag
1205 * @param {string} options.customType the custom type to register to the output
1206 * @param {Function} [options.dataParser] function to parse the line into an object
1207 * @param {boolean} [options.segment] should tag data be attached to the segment object
1208 */
1209 ;
1210
1211 _proto.addParser = function addParser(_ref) {
1212 var _this3 = this;
1213
1214 var expression = _ref.expression,
1215 customType = _ref.customType,
1216 dataParser = _ref.dataParser,
1217 segment = _ref.segment;
1218
1219 if (typeof dataParser !== 'function') {
1220 dataParser = function dataParser(line) {
1221 return line;
1222 };
1223 }
1224
1225 this.customParsers.push(function (line) {
1226 var match = expression.exec(line);
1227
1228 if (match) {
1229 _this3.trigger('data', {
1230 type: 'custom',
1231 data: dataParser(line),
1232 customType: customType,
1233 segment: segment
1234 });
1235
1236 return true;
1237 }
1238 });
1239 }
1240 /**
1241 * Add a custom tag mapper
1242 *
1243 * @param {Object} options
1244 * @param {RegExp} options.expression a regular expression to match the custom tag
1245 * @param {Function} options.map function to translate tag into a different tag
1246 */
1247 ;
1248
1249 _proto.addTagMapper = function addTagMapper(_ref2) {
1250 var expression = _ref2.expression,
1251 map = _ref2.map;
1252
1253 var mapFn = function mapFn(line) {
1254 if (expression.test(line)) {
1255 return map(line);
1256 }
1257
1258 return line;
1259 };
1260
1261 this.tagMappers.push(mapFn);
1262 };
1263
1264 return ParseStream;
1265 }(Stream);
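
/**
 * Usage sketch: wiring a LineStream into a ParseStream and registering a
 * custom tag parser (the `#EXT-X-CUSTOM-TAG` name is hypothetical):
 *
 * ```js
 * var line = new LineStream();
 * var parse = new ParseStream();
 * line.pipe(parse);
 * parse.addParser({
 *   expression: /^#EXT-X-CUSTOM-TAG/, // hypothetical tag name
 *   customType: 'customTag',
 *   segment: true
 * });
 * parse.on('data', function (entry) { console.log(entry.type); });
 * line.push('#EXTM3U\n#EXT-X-CUSTOM-TAG:1\n');
 * // logs: tag    ({ type: 'tag', tagType: 'm3u' })
 * // logs: custom ({ type: 'custom', data: '#EXT-X-CUSTOM-TAG:1', ... })
 * ```
 */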
1266
1267 var camelCase = function camelCase(str) {
1268 return str.toLowerCase().replace(/-(\w)/g, function (a) {
1269 return a[1].toUpperCase();
1270 });
1271 };
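
/**
 * Sketch: `camelCase` converts HLS attribute names into JS-friendly keys:
 *
 * ```js
 * camelCase('CAN-BLOCK-RELOAD'); // => 'canBlockReload'
 * camelCase('PART-TARGET');      // => 'partTarget'
 * ```
 */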
1272
1273 var camelCaseKeys = function camelCaseKeys(attributes) {
1274 var result = {};
1275 Object.keys(attributes).forEach(function (key) {
1276 result[camelCase(key)] = attributes[key];
1277 });
1278 return result;
1279 }; // set SERVER-CONTROL hold back based upon targetDuration and partTargetDuration
1280 // we need this helper because defaults are based upon targetDuration and
1281 // partTargetDuration being set, but they may not be if SERVER-CONTROL appears before
1282 // target durations are set.
1283
1284
1285 var setHoldBack = function setHoldBack(manifest) {
1286 var serverControl = manifest.serverControl,
1287 targetDuration = manifest.targetDuration,
1288 partTargetDuration = manifest.partTargetDuration;
1289
1290 if (!serverControl) {
1291 return;
1292 }
1293
1294 var tag = '#EXT-X-SERVER-CONTROL';
1295 var hb = 'holdBack';
1296 var phb = 'partHoldBack';
1297 var minTargetDuration = targetDuration && targetDuration * 3;
1298 var minPartDuration = partTargetDuration && partTargetDuration * 2;
1299
1300 if (targetDuration && !serverControl.hasOwnProperty(hb)) {
1301 serverControl[hb] = minTargetDuration;
1302 this.trigger('info', {
1303 message: tag + " defaulting HOLD-BACK to targetDuration * 3 (" + minTargetDuration + ")."
1304 });
1305 }
1306
1307 if (minTargetDuration && serverControl[hb] < minTargetDuration) {
1308 this.trigger('warn', {
1309 message: tag + " clamping HOLD-BACK (" + serverControl[hb] + ") to targetDuration * 3 (" + minTargetDuration + ")"
1310 });
1311 serverControl[hb] = minTargetDuration;
1312 } // default no part hold back to part target duration * 3
1313
1314
1315 if (partTargetDuration && !serverControl.hasOwnProperty(phb)) {
1316 serverControl[phb] = partTargetDuration * 3;
1317 this.trigger('info', {
1318 message: tag + " defaulting PART-HOLD-BACK to partTargetDuration * 3 (" + serverControl[phb] + ")."
1319 });
1320 } // if part hold back is too small default it to part target duration * 2
1321
1322
1323 if (partTargetDuration && serverControl[phb] < minPartDuration) {
1324 this.trigger('warn', {
1325 message: tag + " clamping PART-HOLD-BACK (" + serverControl[phb] + ") to partTargetDuration * 2 (" + minPartDuration + ")."
1326 });
1327 serverControl[phb] = minPartDuration;
1328 }
1329 };
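
/**
 * Sketch of the defaulting arithmetic above (hypothetical durations; the
 * `parser` receiver only needs to supply `trigger()` for the info/warn
 * events):
 *
 * ```js
 * var manifest = { serverControl: {}, targetDuration: 6, partTargetDuration: 1 };
 * setHoldBack.call(parser, manifest);
 * // manifest.serverControl.holdBack     === 18 (targetDuration * 3)
 * // manifest.serverControl.partHoldBack === 3  (partTargetDuration * 3)
 * ```
 */
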
1330 /**
1331 * A parser for M3U8 files. The current interpretation of the input is
1332 * exposed as a property `manifest` on parser objects. It's just two lines to
1333 * create and parse a manifest once you have the contents available as a string:
1334 *
1335 * ```js
1336 * var parser = new m3u8.Parser();
1337 * parser.push(xhr.responseText);
1338 * ```
1339 *
1340 * New input can later be applied to update the manifest object by calling
1341 * `push` again.
1342 *
1343 * The parser attempts to create a usable manifest object even if the
1344 * underlying input is somewhat nonsensical. It emits `info` and `warn`
1345 * events during the parse if it encounters input that seems invalid or
1346 * requires some property of the manifest object to be defaulted.
1347 *
1348 * @class Parser
1349 * @extends Stream
1350 */
1351
1352
1353 var Parser = /*#__PURE__*/function (_Stream) {
1354 inheritsLoose(Parser, _Stream);
1355
1356 function Parser() {
1357 var _this;
1358
1359 _this = _Stream.call(this) || this;
1360 _this.lineStream = new LineStream();
1361 _this.parseStream = new ParseStream();
1362
1363 _this.lineStream.pipe(_this.parseStream);
1364 /* eslint-disable consistent-this */
1365
1366
1367 var self = assertThisInitialized(_this);
1368 /* eslint-enable consistent-this */
1369
1370
1371 var uris = [];
1372 var currentUri = {}; // if specified, the active EXT-X-MAP definition
1373
1374 var currentMap; // if specified, the active decryption key
1375
1376 var _key;
1377
1378 var hasParts = false;
1379
1380 var noop = function noop() {};
1381
1382 var defaultMediaGroups = {
1383 'AUDIO': {},
1384 'VIDEO': {},
1385 'CLOSED-CAPTIONS': {},
1386 'SUBTITLES': {}
1387 }; // This is the Widevine UUID from DASH IF IOP. The same exact string is
1388 // used in MPDs with Widevine encrypted streams.
1389
1390 var widevineUuid = 'urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed'; // group segments into numbered timelines delineated by discontinuities
1391
1392 var currentTimeline = 0; // the manifest is empty until the parse stream begins delivering data
1393
1394 _this.manifest = {
1395 allowCache: true,
1396 discontinuityStarts: [],
1397 segments: []
1398 }; // keep track of the last seen segment's byte range end, as segments are not required
1399 // to provide the offset, in which case it defaults to the next byte after the
1400 // previous segment
1401
1402 var lastByterangeEnd = 0; // keep track of the last seen part's byte range end.
1403
1404 var lastPartByterangeEnd = 0;
1405
1406 _this.on('end', function () {
1407 // only add preloadSegment if we don't yet have a uri for it,
1408 // and we actually have parts/preloadHints
1409 if (currentUri.uri || !currentUri.parts && !currentUri.preloadHints) {
1410 return;
1411 }
1412
1413 if (!currentUri.map && currentMap) {
1414 currentUri.map = currentMap;
1415 }
1416
1417 if (!currentUri.key && _key) {
1418 currentUri.key = _key;
1419 }
1420
1421 if (!currentUri.timeline && typeof currentTimeline === 'number') {
1422 currentUri.timeline = currentTimeline;
1423 }
1424
1425 _this.manifest.preloadSegment = currentUri;
1426 }); // update the manifest with the m3u8 entry from the parse stream
1427
1428
1429 _this.parseStream.on('data', function (entry) {
1430 var mediaGroup;
1431 var rendition;
1432 ({
1433 tag: function tag() {
1434 // switch based on the tag type
1435 (({
1436 version: function version() {
1437 if (entry.version) {
1438 this.manifest.version = entry.version;
1439 }
1440 },
1441 'allow-cache': function allowCache() {
1442 this.manifest.allowCache = entry.allowed;
1443
1444 if (!('allowed' in entry)) {
1445 this.trigger('info', {
1446 message: 'defaulting allowCache to YES'
1447 });
1448 this.manifest.allowCache = true;
1449 }
1450 },
1451 byterange: function byterange() {
1452 var byterange = {};
1453
1454 if ('length' in entry) {
1455 currentUri.byterange = byterange;
1456 byterange.length = entry.length;
1457
1458 if (!('offset' in entry)) {
1459 /*
1460 * From the latest spec (as of this writing):
1461 * https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.2.2
1462 *
1463 * Same text since EXT-X-BYTERANGE's introduction in draft 7:
1464 * https://tools.ietf.org/html/draft-pantos-http-live-streaming-07#section-3.3.1)
1465 *
1466 * "If o [offset] is not present, the sub-range begins at the next byte
1467 * following the sub-range of the previous media segment."
1468 */
1469 entry.offset = lastByterangeEnd;
1470 }
1471 }
1472
1473 if ('offset' in entry) {
1474 currentUri.byterange = byterange;
1475 byterange.offset = entry.offset;
1476 }
1477
1478 lastByterangeEnd = byterange.offset + byterange.length;
1479 },
1480 endlist: function endlist() {
1481 this.manifest.endList = true;
1482 },
1483 inf: function inf() {
1484 if (!('mediaSequence' in this.manifest)) {
1485 this.manifest.mediaSequence = 0;
1486 this.trigger('info', {
1487 message: 'defaulting media sequence to zero'
1488 });
1489 }
1490
1491 if (!('discontinuitySequence' in this.manifest)) {
1492 this.manifest.discontinuitySequence = 0;
1493 this.trigger('info', {
1494 message: 'defaulting discontinuity sequence to zero'
1495 });
1496 }
1497
1498 if (entry.duration > 0) {
1499 currentUri.duration = entry.duration;
1500 }
1501
1502 if (entry.duration === 0) {
1503 currentUri.duration = 0.01;
1504 this.trigger('info', {
1505 message: 'updating zero segment duration to a small value'
1506 });
1507 }
1508
1509 this.manifest.segments = uris;
1510 },
1511 key: function key() {
1512 if (!entry.attributes) {
1513 this.trigger('warn', {
1514 message: 'ignoring key declaration without attribute list'
1515 });
1516 return;
1517 } // clear the active encryption key
1518
1519
1520 if (entry.attributes.METHOD === 'NONE') {
1521 _key = null;
1522 return;
1523 }
1524
1525 if (!entry.attributes.URI) {
1526 this.trigger('warn', {
1527 message: 'ignoring key declaration without URI'
1528 });
1529 return;
1530 }
1531
1532 if (entry.attributes.KEYFORMAT === 'com.apple.streamingkeydelivery') {
1533 this.manifest.contentProtection = this.manifest.contentProtection || {}; // TODO: add full support for this.
1534
1535 this.manifest.contentProtection['com.apple.fps.1_0'] = {
1536 attributes: entry.attributes
1537 };
1538 return;
1539 }
1540
1541 if (entry.attributes.KEYFORMAT === 'com.microsoft.playready') {
1542 this.manifest.contentProtection = this.manifest.contentProtection || {}; // TODO: add full support for this.
1543
1544 this.manifest.contentProtection['com.microsoft.playready'] = {
1545 uri: entry.attributes.URI
1546 };
1547 return;
1548 } // check if the content is encrypted for Widevine
1549 // Widevine/HLS spec: https://storage.googleapis.com/wvdocs/Widevine_DRM_HLS.pdf
1550
1551
1552 if (entry.attributes.KEYFORMAT === widevineUuid) {
1553 var VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR', 'SAMPLE-AES-CENC'];
1554
1555 if (VALID_METHODS.indexOf(entry.attributes.METHOD) === -1) {
1556 this.trigger('warn', {
1557 message: 'invalid key method provided for Widevine'
1558 });
1559 return;
1560 }
1561
1562 if (entry.attributes.METHOD === 'SAMPLE-AES-CENC') {
1563 this.trigger('warn', {
1564 message: 'SAMPLE-AES-CENC is deprecated, please use SAMPLE-AES-CTR instead'
1565 });
1566 }
1567
1568 if (entry.attributes.URI.substring(0, 23) !== 'data:text/plain;base64,') {
1569 this.trigger('warn', {
1570 message: 'invalid key URI provided for Widevine'
1571 });
1572 return;
1573 }
1574
1575 if (!(entry.attributes.KEYID && entry.attributes.KEYID.substring(0, 2) === '0x')) {
1576 this.trigger('warn', {
1577 message: 'invalid key ID provided for Widevine'
1578 });
1579 return;
1580 } // if Widevine key attributes are valid, store them as `contentProtection`
1581 // on the manifest to emulate Widevine tag structure in a DASH mpd
1582
1583
1584 this.manifest.contentProtection = this.manifest.contentProtection || {};
1585 this.manifest.contentProtection['com.widevine.alpha'] = {
1586 attributes: {
1587 schemeIdUri: entry.attributes.KEYFORMAT,
1588 // remove '0x' from the key id string
1589 keyId: entry.attributes.KEYID.substring(2)
1590 },
1591 // decode the base64-encoded PSSH box
1592 pssh: decodeB64ToUint8Array(entry.attributes.URI.split(',')[1])
1593 };
1594 return;
1595 }
1596
1597 if (!entry.attributes.METHOD) {
1598 this.trigger('warn', {
1599 message: 'defaulting key method to AES-128'
1600 });
1601 } // setup an encryption key for upcoming segments
1602
1603
1604 _key = {
1605 method: entry.attributes.METHOD || 'AES-128',
1606 uri: entry.attributes.URI
1607 };
1608
1609 if (typeof entry.attributes.IV !== 'undefined') {
1610 _key.iv = entry.attributes.IV;
1611 }
1612 },
1613 'media-sequence': function mediaSequence() {
1614 if (!isFinite(entry.number)) {
1615 this.trigger('warn', {
1616 message: 'ignoring invalid media sequence: ' + entry.number
1617 });
1618 return;
1619 }
1620
1621 this.manifest.mediaSequence = entry.number;
1622 },
1623 'discontinuity-sequence': function discontinuitySequence() {
1624 if (!isFinite(entry.number)) {
1625 this.trigger('warn', {
1626 message: 'ignoring invalid discontinuity sequence: ' + entry.number
1627 });
1628 return;
1629 }
1630
1631 this.manifest.discontinuitySequence = entry.number;
1632 currentTimeline = entry.number;
1633 },
1634 'playlist-type': function playlistType() {
1635 if (!/VOD|EVENT/.test(entry.playlistType)) {
1636 this.trigger('warn', {
1637 message: 'ignoring unknown playlist type: ' + entry.playlistType
1638 });
1639 return;
1640 }
1641
1642 this.manifest.playlistType = entry.playlistType;
1643 },
1644 map: function map() {
1645 currentMap = {};
1646
1647 if (entry.uri) {
1648 currentMap.uri = entry.uri;
1649 }
1650
1651 if (entry.byterange) {
1652 currentMap.byterange = entry.byterange;
1653 }
1654
1655 if (_key) {
1656 currentMap.key = _key;
1657 }
1658 },
1659 'stream-inf': function streamInf() {
1660 this.manifest.playlists = uris;
1661 this.manifest.mediaGroups = this.manifest.mediaGroups || defaultMediaGroups;
1662
1663 if (!entry.attributes) {
1664 this.trigger('warn', {
1665 message: 'ignoring empty stream-inf attributes'
1666 });
1667 return;
1668 }
1669
1670 if (!currentUri.attributes) {
1671 currentUri.attributes = {};
1672 }
1673
1674 _extends_1(currentUri.attributes, entry.attributes);
1675 },
1676 media: function media() {
1677 this.manifest.mediaGroups = this.manifest.mediaGroups || defaultMediaGroups;
1678
1679 if (!(entry.attributes && entry.attributes.TYPE && entry.attributes['GROUP-ID'] && entry.attributes.NAME)) {
1680 this.trigger('warn', {
1681 message: 'ignoring incomplete or missing media group'
1682 });
1683 return;
1684 } // find the media group, creating defaults as necessary
1685
1686
1687 var mediaGroupType = this.manifest.mediaGroups[entry.attributes.TYPE];
1688 mediaGroupType[entry.attributes['GROUP-ID']] = mediaGroupType[entry.attributes['GROUP-ID']] || {};
1689 mediaGroup = mediaGroupType[entry.attributes['GROUP-ID']]; // collect the rendition metadata
1690
1691 rendition = {
1692 default: /yes/i.test(entry.attributes.DEFAULT)
1693 };
1694
1695 if (rendition.default) {
1696 rendition.autoselect = true;
1697 } else {
1698 rendition.autoselect = /yes/i.test(entry.attributes.AUTOSELECT);
1699 }
1700
1701 if (entry.attributes.LANGUAGE) {
1702 rendition.language = entry.attributes.LANGUAGE;
1703 }
1704
1705 if (entry.attributes.URI) {
1706 rendition.uri = entry.attributes.URI;
1707 }
1708
1709 if (entry.attributes['INSTREAM-ID']) {
1710 rendition.instreamId = entry.attributes['INSTREAM-ID'];
1711 }
1712
1713 if (entry.attributes.CHARACTERISTICS) {
1714 rendition.characteristics = entry.attributes.CHARACTERISTICS;
1715 }
1716
1717 if (entry.attributes.FORCED) {
1718 rendition.forced = /yes/i.test(entry.attributes.FORCED);
1719 } // insert the new rendition
1720
1721
1722 mediaGroup[entry.attributes.NAME] = rendition;
1723 },
1724 discontinuity: function discontinuity() {
1725 currentTimeline += 1;
1726 currentUri.discontinuity = true;
1727 this.manifest.discontinuityStarts.push(uris.length);
1728 },
1729 'program-date-time': function programDateTime() {
1730 if (typeof this.manifest.dateTimeString === 'undefined') {
1731 // PROGRAM-DATE-TIME is a media-segment tag, but for backwards
1732 // compatibility, we add the first occurrence of the PROGRAM-DATE-TIME tag
1733 // to the manifest object
1734 // TODO: Consider removing this in future major version
1735 this.manifest.dateTimeString = entry.dateTimeString;
1736 this.manifest.dateTimeObject = entry.dateTimeObject;
1737 }
1738
1739 currentUri.dateTimeString = entry.dateTimeString;
1740 currentUri.dateTimeObject = entry.dateTimeObject;
1741 },
1742 targetduration: function targetduration() {
1743 if (!isFinite(entry.duration) || entry.duration < 0) {
1744 this.trigger('warn', {
1745 message: 'ignoring invalid target duration: ' + entry.duration
1746 });
1747 return;
1748 }
1749
1750 this.manifest.targetDuration = entry.duration;
1751 setHoldBack.call(this, this.manifest);
1752 },
1753 start: function start() {
1754 if (!entry.attributes || isNaN(entry.attributes['TIME-OFFSET'])) {
1755 this.trigger('warn', {
1756 message: 'ignoring start declaration without appropriate attribute list'
1757 });
1758 return;
1759 }
1760
1761 this.manifest.start = {
1762 timeOffset: entry.attributes['TIME-OFFSET'],
1763 precise: entry.attributes.PRECISE
1764 };
1765 },
1766 'cue-out': function cueOut() {
1767 currentUri.cueOut = entry.data;
1768 },
1769 'cue-out-cont': function cueOutCont() {
1770 currentUri.cueOutCont = entry.data;
1771 },
1772 'cue-in': function cueIn() {
1773 currentUri.cueIn = entry.data;
1774 },
1775 'skip': function skip() {
1776 this.manifest.skip = camelCaseKeys(entry.attributes);
1777 this.warnOnMissingAttributes_('#EXT-X-SKIP', entry.attributes, ['SKIPPED-SEGMENTS']);
1778 },
1779 'part': function part() {
1780 var _this2 = this;
1781
1782 hasParts = true; // parts are always specified before a segment
1783
1784 var segmentIndex = this.manifest.segments.length;
1785 var part = camelCaseKeys(entry.attributes);
1786 currentUri.parts = currentUri.parts || [];
1787 currentUri.parts.push(part);
1788
1789 if (part.byterange) {
1790 if (!part.byterange.hasOwnProperty('offset')) {
1791 part.byterange.offset = lastPartByterangeEnd;
1792 }
1793
1794 lastPartByterangeEnd = part.byterange.offset + part.byterange.length;
1795 }
1796
1797 var partIndex = currentUri.parts.length - 1;
1798 this.warnOnMissingAttributes_("#EXT-X-PART #" + partIndex + " for segment #" + segmentIndex, entry.attributes, ['URI', 'DURATION']);
1799
1800 if (this.manifest.renditionReports) {
1801 this.manifest.renditionReports.forEach(function (r, i) {
1802 if (!r.hasOwnProperty('lastPart')) {
1803 _this2.trigger('warn', {
1804 message: "#EXT-X-RENDITION-REPORT #" + i + " lacks required attribute(s): LAST-PART"
1805 });
1806 }
1807 });
1808 }
1809 },
1810 'server-control': function serverControl() {
1811 var attrs = this.manifest.serverControl = camelCaseKeys(entry.attributes);
1812
1813 if (!attrs.hasOwnProperty('canBlockReload')) {
1814 attrs.canBlockReload = false;
1815 this.trigger('info', {
1816 message: '#EXT-X-SERVER-CONTROL defaulting CAN-BLOCK-RELOAD to false'
1817 });
1818 }
1819
1820 setHoldBack.call(this, this.manifest);
1821
1822 if (attrs.canSkipDateranges && !attrs.hasOwnProperty('canSkipUntil')) {
1823 this.trigger('warn', {
1824 message: '#EXT-X-SERVER-CONTROL lacks required attribute CAN-SKIP-UNTIL which is required when CAN-SKIP-DATERANGES is set'
1825 });
1826 }
1827 },
1828 'preload-hint': function preloadHint() {
1829 // parts are always specified before a segment
1830 var segmentIndex = this.manifest.segments.length;
1831 var hint = camelCaseKeys(entry.attributes);
1832 var isPart = hint.type && hint.type === 'PART';
1833 currentUri.preloadHints = currentUri.preloadHints || [];
1834 currentUri.preloadHints.push(hint);
1835
1836 if (hint.byterange) {
1837 if (!hint.byterange.hasOwnProperty('offset')) {
1838 // use last part byterange end or zero if not a part.
1839 hint.byterange.offset = isPart ? lastPartByterangeEnd : 0;
1840
1841 if (isPart) {
1842 lastPartByterangeEnd = hint.byterange.offset + hint.byterange.length;
1843 }
1844 }
1845 }
1846
1847 var index = currentUri.preloadHints.length - 1;
1848 this.warnOnMissingAttributes_("#EXT-X-PRELOAD-HINT #" + index + " for segment #" + segmentIndex, entry.attributes, ['TYPE', 'URI']);
1849
1850 if (!hint.type) {
1851 return;
1852 } // search through all preload hints except for the current one for
1853 // a duplicate type.
1854
1855
1856 for (var i = 0; i < currentUri.preloadHints.length - 1; i++) {
1857 var otherHint = currentUri.preloadHints[i];
1858
1859 if (!otherHint.type) {
1860 continue;
1861 }
1862
1863 if (otherHint.type === hint.type) {
1864 this.trigger('warn', {
1865 message: "#EXT-X-PRELOAD-HINT #" + index + " for segment #" + segmentIndex + " has the same TYPE " + hint.type + " as preload hint #" + i
1866 });
1867 }
1868 }
1869 },
1870 'rendition-report': function renditionReport() {
1871 var report = camelCaseKeys(entry.attributes);
1872 this.manifest.renditionReports = this.manifest.renditionReports || [];
1873 this.manifest.renditionReports.push(report);
1874 var index = this.manifest.renditionReports.length - 1;
1875 var required = ['LAST-MSN', 'URI'];
1876
1877 if (hasParts) {
1878 required.push('LAST-PART');
1879 }
1880
1881 this.warnOnMissingAttributes_("#EXT-X-RENDITION-REPORT #" + index, entry.attributes, required);
1882 },
1883 'part-inf': function partInf() {
1884 this.manifest.partInf = camelCaseKeys(entry.attributes);
1885 this.warnOnMissingAttributes_('#EXT-X-PART-INF', entry.attributes, ['PART-TARGET']);
1886
1887 if (this.manifest.partInf.partTarget) {
1888 this.manifest.partTargetDuration = this.manifest.partInf.partTarget;
1889 }
1890
1891 setHoldBack.call(this, this.manifest);
1892 }
1893 })[entry.tagType] || noop).call(self);
1894 },
1895 uri: function uri() {
1896 currentUri.uri = entry.uri;
1897 uris.push(currentUri); // if no explicit duration was declared, use the target duration
1898
1899 if (this.manifest.targetDuration && !('duration' in currentUri)) {
1900 this.trigger('warn', {
1901 message: 'defaulting segment duration to the target duration'
1902 });
1903 currentUri.duration = this.manifest.targetDuration;
1904 } // annotate with encryption information, if necessary
1905
1906
1907 if (_key) {
1908 currentUri.key = _key;
1909 }
1910
1911 currentUri.timeline = currentTimeline; // annotate with initialization segment information, if necessary
1912
1913 if (currentMap) {
1914 currentUri.map = currentMap;
1915 } // reset the last part byterange end, as it must restart at 0 for each new segment
1916
1917
1918 lastPartByterangeEnd = 0; // prepare for the next URI
1919
1920 currentUri = {};
1921 },
1922 comment: function comment() {// comments are not important for playback
1923 },
1924 custom: function custom() {
1925 // if this is segment-level data attach the output to the segment
1926 if (entry.segment) {
1927 currentUri.custom = currentUri.custom || {};
1928 currentUri.custom[entry.customType] = entry.data; // if this is manifest-level data attach to the top level manifest object
1929 } else {
1930 this.manifest.custom = this.manifest.custom || {};
1931 this.manifest.custom[entry.customType] = entry.data;
1932 }
1933 }
1934 })[entry.type].call(self);
1935 });
1936
1937 return _this;
1938 }
1939
1940 var _proto = Parser.prototype;
1941
1942 _proto.warnOnMissingAttributes_ = function warnOnMissingAttributes_(identifier, attributes, required) {
1943 var missing = [];
1944 required.forEach(function (key) {
1945 if (!attributes.hasOwnProperty(key)) {
1946 missing.push(key);
1947 }
1948 });
1949
1950 if (missing.length) {
1951 this.trigger('warn', {
1952 message: identifier + " lacks required attribute(s): " + missing.join(', ')
1953 });
1954 }
1955 }
1956 /**
1957 * Parse the input string and update the manifest object.
1958 *
1959 * @param {string} chunk a potentially incomplete portion of the manifest
1960 */
1961 ;
1962
1963 _proto.push = function push(chunk) {
1964 this.lineStream.push(chunk);
1965 }
1966 /**
1967 * Flush any remaining input. This can be handy if the last line of an M3U8
1968 * manifest did not contain a trailing newline but the file has been
1969 * completely received.
1970 */
1971 ;
1972
1973 _proto.end = function end() {
1974 // flush any buffered input
1975 this.lineStream.push('\n');
1976 this.trigger('end');
1977 }
1978 /**
1979 * Add an additional parser for non-standard tags
1980 *
1981 * @param {Object} options a map of options for the added parser
1982 * @param {RegExp} options.expression a regular expression to match the custom header
1983 * @param {string} options.type the type to register to the output
1984 * @param {Function} [options.dataParser] function to parse the line into an object
1985 * @param {boolean} [options.segment] should tag data be attached to the segment object
1986 */
1987 ;
1988
1989 _proto.addParser = function addParser(options) {
1990 this.parseStream.addParser(options);
1991 }
1992 /**
1993 * Add a custom header mapper
1994 *
1995 * @param {Object} options
1996 * @param {RegExp} options.expression a regular expression to match the custom header
1997 * @param {Function} options.map function to translate tag into a different tag
1998 */
1999 ;
2000
2001 _proto.addTagMapper = function addTagMapper(options) {
2002 this.parseStream.addTagMapper(options);
2003 };
2004
2005 return Parser;
2006 }(Stream);
2007
2008 var regexs = {
2009 // to determine mime types
2010 mp4: /^(av0?1|avc0?[1234]|vp0?9|flac|opus|mp3|mp4a|mp4v|stpp.ttml.im1t)/,
2011 webm: /^(vp0?[89]|av0?1|opus|vorbis)/,
2012 ogg: /^(vp0?[89]|theora|flac|opus|vorbis)/,
2013 // to determine if a codec is audio or video
2014 video: /^(av0?1|avc0?[1234]|vp0?[89]|hvc1|hev1|theora|mp4v)/,
2015 audio: /^(mp4a|flac|vorbis|opus|ac-[34]|ec-3|alac|mp3|speex|aac)/,
2016 text: /^(stpp.ttml.im1t)/,
2017 // mux.js support regex
2018 muxerVideo: /^(avc0?1)/,
2019 muxerAudio: /^(mp4a)/,
2020 // match nothing as muxer does not support text right now.
2021 // there can never be a character before the start of a string
2022 // so this matches nothing.
2023 muxerText: /a^/
2024 };
2025 var mediaTypes = ['video', 'audio', 'text'];
2026 var upperMediaTypes = ['Video', 'Audio', 'Text'];
2027 /**
2028 * Replace the old apple-style `avc1.<dd>.<dd>` codec string with the standard
2029 * `avc1.<hhhhhh>`
2030 *
2031 * @param {string} codec
2032 * Codec string to translate
2033 * @return {string}
2034 * The translated codec string
2035 */
2036
2037 var translateLegacyCodec = function translateLegacyCodec(codec) {
2038 if (!codec) {
2039 return codec;
2040 }
2041
2042 return codec.replace(/avc1\.(\d+)\.(\d+)/i, function (orig, profile, avcLevel) {
2043 var profileHex = ('00' + Number(profile).toString(16)).slice(-2);
2044 var avcLevelHex = ('00' + Number(avcLevel).toString(16)).slice(-2);
2045 return 'avc1.' + profileHex + '00' + avcLevelHex;
2046 });
2047 };
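  // A worked example (hypothetical input): the legacy Apple-style string
  // 'avc1.66.30' has profile 66 (0x42) and level 30 (0x1e), so:
  //   translateLegacyCodec('avc1.66.30'); // => 'avc1.42001e'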
2048 /**
2049 * @typedef {Object} ParsedCodecInfo
2050 * @property {number} codecCount
2051 * Number of codecs parsed
2052 * @property {string} [videoCodec]
2053 * Parsed video codec (if found)
2054 * @property {string} [videoObjectTypeIndicator]
2055 * Video object type indicator (if found)
2056 * @property {string|null} audioProfile
2057 * Audio profile
2058 */
2059
2060 /**
2061 * Parses a codec string to retrieve the number of codecs specified, the video codec and
2062 * object type indicator, and the audio profile.
2063 *
2064 * @param {string} [codecString]
2065 * The codec string to parse
2066 * @return {Object[]}
2067 * An array of parsed codec objects ({type, details, mediaType})
2068 */
2069
2070 var parseCodecs = function parseCodecs(codecString) {
2071 if (codecString === void 0) {
2072 codecString = '';
2073 }
2074
2075 var codecs = codecString.split(',');
2076 var result = [];
2077 codecs.forEach(function (codec) {
2078 codec = codec.trim();
2079 var codecType;
2080 mediaTypes.forEach(function (name) {
2081 var match = regexs[name].exec(codec.toLowerCase());
2082
2083 if (!match || match.length <= 1) {
2084 return;
2085 }
2086
2087 codecType = name; // maintain codec case
2088
2089 var type = codec.substring(0, match[1].length);
2090 var details = codec.replace(type, '');
2091 result.push({
2092 type: type,
2093 details: details,
2094 mediaType: name
2095 });
2096 });
2097
2098 if (!codecType) {
2099 result.push({
2100 type: codec,
2101 details: '',
2102 mediaType: 'unknown'
2103 });
2104 }
2105 });
2106 return result;
2107 };
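  // A worked example (hypothetical codec string): a typical muxed
  // AVC/AAC CODECS attribute parses into one entry per codec:
  //   parseCodecs('avc1.42001e, mp4a.40.2');
  //   // => [{ type: 'avc1', details: '.42001e', mediaType: 'video' },
  //   //     { type: 'mp4a', details: '.40.2', mediaType: 'audio' }]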
2108 /**
2109 * Returns a ParsedCodecInfo object for the default alternate audio playlist if there is
2110 * a default alternate audio playlist for the provided audio group.
2111 *
2112 * @param {Object} master
2113 * The master playlist
2114 * @param {string} audioGroupId
2115 * ID of the audio group for which to find the default codec info
2116 * @return {Object[]|null}
2117 * Parsed codec info for the default alternate audio playlist, or null
2118 */
2119
2120 var codecsFromDefault = function codecsFromDefault(master, audioGroupId) {
2121 if (!master.mediaGroups.AUDIO || !audioGroupId) {
2122 return null;
2123 }
2124
2125 var audioGroup = master.mediaGroups.AUDIO[audioGroupId];
2126
2127 if (!audioGroup) {
2128 return null;
2129 }
2130
2131 for (var name in audioGroup) {
2132 var audioType = audioGroup[name];
2133
2134 if (audioType.default && audioType.playlists) {
2135 // codec should be the same for all playlists within the audio type
2136 return parseCodecs(audioType.playlists[0].attributes.CODECS);
2137 }
2138 }
2139
2140 return null;
2141 };
2142 var isAudioCodec = function isAudioCodec(codec) {
2143 if (codec === void 0) {
2144 codec = '';
2145 }
2146
2147 return regexs.audio.test(codec.trim().toLowerCase());
2148 };
2149 var isTextCodec = function isTextCodec(codec) {
2150 if (codec === void 0) {
2151 codec = '';
2152 }
2153
2154 return regexs.text.test(codec.trim().toLowerCase());
2155 };
2156 var getMimeForCodec = function getMimeForCodec(codecString) {
2157 if (!codecString || typeof codecString !== 'string') {
2158 return;
2159 }
2160
2161 var codecs = codecString.toLowerCase().split(',').map(function (c) {
2162 return translateLegacyCodec(c.trim());
2163 }); // default to video type
2164
2165 var type = 'video'; // only change to audio type if the only codec we have is
2166 // audio
2167
2168 if (codecs.length === 1 && isAudioCodec(codecs[0])) {
2169 type = 'audio';
2170 } else if (codecs.length === 1 && isTextCodec(codecs[0])) {
2171 // text uses application/<container> for now
2172 type = 'application';
2173 } // default the container to mp4
2174
2175
2176 var container = 'mp4'; // every codec must be able to go into the container
2177 // for that container to be the correct one
2178
2179 if (codecs.every(function (c) {
2180 return regexs.mp4.test(c);
2181 })) {
2182 container = 'mp4';
2183 } else if (codecs.every(function (c) {
2184 return regexs.webm.test(c);
2185 })) {
2186 container = 'webm';
2187 } else if (codecs.every(function (c) {
2188 return regexs.ogg.test(c);
2189 })) {
2190 container = 'ogg';
2191 }
2192
2193 return type + "/" + container + ";codecs=\"" + codecString + "\"";
2194 };
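  // Worked examples (hypothetical codec strings):
  //   getMimeForCodec('avc1.42001e,mp4a.40.2'); // => 'video/mp4;codecs="avc1.42001e,mp4a.40.2"'
  //   getMimeForCodec('mp4a.40.2');             // => 'audio/mp4;codecs="mp4a.40.2"'
  //   getMimeForCodec('vorbis');                // => 'audio/webm;codecs="vorbis"'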
2195 var browserSupportsCodec = function browserSupportsCodec(codecString) {
2196 if (codecString === void 0) {
2197 codecString = '';
2198 }
2199
2200 return window.MediaSource && window.MediaSource.isTypeSupported && window.MediaSource.isTypeSupported(getMimeForCodec(codecString)) || false;
2201 };
2202 var muxerSupportsCodec = function muxerSupportsCodec(codecString) {
2203 if (codecString === void 0) {
2204 codecString = '';
2205 }
2206
2207 return codecString.toLowerCase().split(',').every(function (codec) {
2208 codec = codec.trim(); // any match is supported.
2209
2210 for (var i = 0; i < upperMediaTypes.length; i++) {
2211 var type = upperMediaTypes[i];
2212
2213 if (regexs["muxer" + type].test(codec)) {
2214 return true;
2215 }
2216 }
2217
2218 return false;
2219 });
2220 };
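  // Worked examples (hypothetical codec strings): the muxer regexes above
  // only admit AVC video and AAC audio, so:
  //   muxerSupportsCodec('avc1.4d400d, mp4a.40.2'); // => true
  //   muxerSupportsCodec('hvc1.1.6.L93.90');        // => false (HEVC)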
2221 var DEFAULT_AUDIO_CODEC = 'mp4a.40.2';
2222 var DEFAULT_VIDEO_CODEC = 'avc1.4d400d';
2223
2224 /**
2225 * ranges
2226 *
2227 * Utilities for working with TimeRanges.
2228 *
2229 */
2230
2231 var TIME_FUDGE_FACTOR = 1 / 30; // Comparisons between time values such as current time and the end of the buffered range
2232 // can be misleading because of precision differences or when the current media has poorly
2233 // aligned audio and video, which can cause values to be slightly off from what you would
2234 // expect. This value is what we consider to be safe to use in such comparisons to account
2235 // for these scenarios.
2236
2237 var SAFE_TIME_DELTA = TIME_FUDGE_FACTOR * 3;
2238
2239 var filterRanges = function filterRanges(timeRanges, predicate) {
2240 var results = [];
2241 var i;
2242
2243 if (timeRanges && timeRanges.length) {
2244 // Search for ranges that match the predicate
2245 for (i = 0; i < timeRanges.length; i++) {
2246 if (predicate(timeRanges.start(i), timeRanges.end(i))) {
2247 results.push([timeRanges.start(i), timeRanges.end(i)]);
2248 }
2249 }
2250 }
2251
2252 return videojs__default["default"].createTimeRanges(results);
2253 };
2254 /**
2255 * Attempts to find the buffered TimeRange that contains the specified
2256 * time.
2257 *
2258 * @param {TimeRanges} buffered - the TimeRanges object to query
2259 * @param {number} time - the time to filter on.
2260 * @return {TimeRanges} a new TimeRanges object
2261 */
2262
2263
2264 var findRange = function findRange(buffered, time) {
2265 return filterRanges(buffered, function (start, end) {
2266 return start - SAFE_TIME_DELTA <= time && end + SAFE_TIME_DELTA >= time;
2267 });
2268 };
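  // A worked example (hypothetical buffer): with SAFE_TIME_DELTA = 0.1,
  // a range [0, 10] is still "found" for a time of 10.05, since
  // end + SAFE_TIME_DELTA >= time holds.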
2269 /**
2270 * Returns the TimeRanges that begin later than the specified time.
2271 *
2272 * @param {TimeRanges} timeRanges - the TimeRanges object to query
2273 * @param {number} time - the time to filter on.
2274 * @return {TimeRanges} a new TimeRanges object.
2275 */
2276
2277 var findNextRange = function findNextRange(timeRanges, time) {
2278 return filterRanges(timeRanges, function (start) {
2279 return start - TIME_FUDGE_FACTOR >= time;
2280 });
2281 };
2282 /**
2283 * Returns gaps within a list of TimeRanges
2284 *
2285 * @param {TimeRanges} buffered - the TimeRanges object
2286 * @return {TimeRanges} a TimeRanges object of gaps
2287 */
2288
2289 var findGaps = function findGaps(buffered) {
2290 if (buffered.length < 2) {
2291 return videojs__default["default"].createTimeRanges();
2292 }
2293
2294 var ranges = [];
2295
2296 for (var i = 1; i < buffered.length; i++) {
2297 var start = buffered.end(i - 1);
2298 var end = buffered.start(i);
2299 ranges.push([start, end]);
2300 }
2301
2302 return videojs__default["default"].createTimeRanges(ranges);
2303 };
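  // A worked example (hypothetical buffer): for buffered ranges [0, 10]
  // and [15, 20], findGaps returns the single range [10, 15].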
2304 /**
2305 * Calculate the intersection of two TimeRanges
2306 *
2307 * @param {TimeRanges} bufferA
2308 * @param {TimeRanges} bufferB
2309 * @return {TimeRanges} The intersection of `bufferA` with `bufferB`
2310 */
2311
2312 var bufferIntersection = function bufferIntersection(bufferA, bufferB) {
2313 var start = null;
2314 var end = null;
2315 var arity = 0;
2316 var extents = [];
2317 var ranges = [];
2318
2319 if (!bufferA || !bufferA.length || !bufferB || !bufferB.length) {
2320 return videojs__default["default"].createTimeRange();
2321 } // Handle the case where we have both buffers and create an
2322 // intersection of the two
2323
2324
2325 var count = bufferA.length; // A) Gather up all start and end times
2326
2327 while (count--) {
2328 extents.push({
2329 time: bufferA.start(count),
2330 type: 'start'
2331 });
2332 extents.push({
2333 time: bufferA.end(count),
2334 type: 'end'
2335 });
2336 }
2337
2338 count = bufferB.length;
2339
2340 while (count--) {
2341 extents.push({
2342 time: bufferB.start(count),
2343 type: 'start'
2344 });
2345 extents.push({
2346 time: bufferB.end(count),
2347 type: 'end'
2348 });
2349 } // B) Sort them by time
2350
2351
2352 extents.sort(function (a, b) {
2353 return a.time - b.time;
2354 }); // C) Go along one by one incrementing arity for start and decrementing
2355 // arity for ends
2356
2357 for (count = 0; count < extents.length; count++) {
2358 if (extents[count].type === 'start') {
2359 arity++; // D) If arity is ever incremented to 2 we are entering an
2360 // overlapping range
2361
2362 if (arity === 2) {
2363 start = extents[count].time;
2364 }
2365 } else if (extents[count].type === 'end') {
2366 arity--; // E) If arity is ever decremented to 1 we are leaving an
2367 // overlapping range
2368
2369 if (arity === 1) {
2370 end = extents[count].time;
2371 }
2372 } // F) Record overlapping ranges
2373
2374
2375 if (start !== null && end !== null) {
2376 ranges.push([start, end]);
2377 start = null;
2378 end = null;
2379 }
2380 }
2381
2382 return videojs__default["default"].createTimeRanges(ranges);
2383 };
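  // A worked example (hypothetical buffers): for bufferA = [[0, 10]] and
  // bufferB = [[5, 15]], arity reaches 2 at time 5 and falls back to 1
  // at time 10, so the intersection is [[5, 10]].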
2384 /**
2385 * Gets a human readable string for a TimeRange
2386 *
2387 * @param {TimeRange} range
2388 * @return {string} a human readable string
2389 */
2390
2391 var printableRange = function printableRange(range) {
2392 var strArr = [];
2393
2394 if (!range || !range.length) {
2395 return '';
2396 }
2397
2398 for (var i = 0; i < range.length; i++) {
2399 strArr.push(range.start(i) + ' => ' + range.end(i));
2400 }
2401
2402 return strArr.join(', ');
2403 };
2404 /**
2405 * Calculates the amount of time left in seconds until the player hits the end of the
2406 * buffer and causes a rebuffer
2407 *
2408 * @param {TimeRange} buffered
2409 * The state of the buffer
2410 * @param {number} currentTime
2411 * The current time of the player
2412 * @param {number} playbackRate
2413 * The current playback rate of the player. Defaults to 1.
2414 * @return {number}
2415 * Time until the player has to start rebuffering in seconds.
2416 * @function timeUntilRebuffer
2417 */
2418
2419 var timeUntilRebuffer = function timeUntilRebuffer(buffered, currentTime, playbackRate) {
2420 if (playbackRate === void 0) {
2421 playbackRate = 1;
2422 }
2423
2424 var bufferedEnd = buffered.length ? buffered.end(buffered.length - 1) : 0;
2425 return (bufferedEnd - currentTime) / playbackRate;
2426 };
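  // A worked example (hypothetical values): with the buffer ending at 30,
  // currentTime 25, and playbackRate 2:
  //   timeUntilRebuffer(buffered, 25, 2); // => (30 - 25) / 2 = 2.5 seconds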
2427 /**
2428 * Converts a TimeRanges object into an array representation
2429 *
2430 * @param {TimeRanges} timeRanges
2431 * @return {Array}
2432 */
2433
2434 var timeRangesToArray = function timeRangesToArray(timeRanges) {
2435 var timeRangesList = [];
2436
2437 for (var i = 0; i < timeRanges.length; i++) {
2438 timeRangesList.push({
2439 start: timeRanges.start(i),
2440 end: timeRanges.end(i)
2441 });
2442 }
2443
2444 return timeRangesList;
2445 };
2446 /**
2447 * Determines if two time range objects are different.
2448 *
2449 * @param {TimeRange} a
2450 * the first time range object to check
2451 *
2452 * @param {TimeRange} b
2453 * the second time range object to check
2454 *
2455 * @return {Boolean}
2456 * Whether the time range objects differ
2457 */
2458
2459 var isRangeDifferent = function isRangeDifferent(a, b) {
2460 // same object
2461 if (a === b) {
2462 return false;
2463 } // one or the other is undefined
2464
2465
2466 if (!a && b || !b && a) {
2467 return true;
2468 } // length is different
2469
2470
2471 if (a.length !== b.length) {
2472 return true;
2473 } // see if any start/end pair is different
2474
2475
2476 for (var i = 0; i < a.length; i++) {
2477 if (a.start(i) !== b.start(i) || a.end(i) !== b.end(i)) {
2478 return true;
2479 }
2480 } // if the length and every pair is the same
2481 // this is the same time range
2482
2483
2484 return false;
2485 };
2486 var lastBufferedEnd = function lastBufferedEnd(a) {
2487 if (!a || !a.length || !a.end) {
2488 return;
2489 }
2490
2491 return a.end(a.length - 1);
2492 };
2493 /**
2494 * A utility function to add up the amount of time in a timeRange
2495 * after a specified startTime.
2496 * e.g. [[0, 10], [20, 40], [50, 60]] with a startTime of 0
2497 * would return 40, as there are 40 seconds after 0 in the timeRange
2498 *
2499 * @param {TimeRange} range
2500 * The range to check against
2501 * @param {number} startTime
2502 * The time in the time range that you should start counting from
2503 *
2504 * @return {number}
2505 * The number of seconds in the buffer past the specified time.
2506 */
2507
2508 var timeAheadOf = function timeAheadOf(range, startTime) {
2509 var time = 0;
2510
2511 if (!range || !range.length) {
2512 return time;
2513 }
2514
2515 for (var i = 0; i < range.length; i++) {
2516 var start = range.start(i);
2517 var end = range.end(i); // startTime is after this range entirely
2518
2519 if (startTime > end) {
2520 continue;
2521 } // startTime is within this range
2522
2523
2524 if (startTime > start && startTime <= end) {
2525 time += end - startTime;
2526 continue;
2527 } // startTime is before this range.
2528
2529
2530 time += end - start;
2531 }
2532
2533 return time;
2534 };
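  // A worked example (hypothetical ranges): for [[0, 10], [20, 40], [50, 60]]
  // and startTime 25, the first range is skipped (25 > 10), the second
  // contributes 40 - 25 = 15, and the third 60 - 50 = 10, so 25 total.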
2535
2536 /**
2537 * @file playlist.js
2538 *
2539 * Playlist related utilities.
2540 */
2541 var createTimeRange = videojs__default["default"].createTimeRange;
2542 /**
2543 * Get the duration of a segment, with special cases for
2544 * llhls segments that do not have a duration yet.
2545 *
2546 * @param {Object} playlist
2547 * the playlist that the segment belongs to.
2548 * @param {Object} segment
2549 * the segment to get a duration for.
2550 *
2551 * @return {number}
2552 * the segment duration
2553 */
2554
2555 var segmentDurationWithParts = function segmentDurationWithParts(playlist, segment) {
2556 // if this isn't a preload segment
2557 // then we will have a segment duration that is accurate.
2558 if (!segment.preload) {
2559 return segment.duration;
2560 } // otherwise we have to add up parts and preload hints
2561 // to get an up to date duration.
2562
2563
2564 var result = 0;
2565 (segment.parts || []).forEach(function (p) {
2566 result += p.duration;
2567 }); // for preload hints we have to use partTargetDuration
2568 // as they won't even have a duration yet.
2569
2570 (segment.preloadHints || []).forEach(function (p) {
2571 if (p.type === 'PART') {
2572 result += playlist.partTargetDuration;
2573 }
2574 });
2575 return result;
2576 };
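  // A worked example (hypothetical LL-HLS segment): a preload segment with
  // two 1-second parts plus one PART preload hint, in a playlist whose
  // partTargetDuration is 1, yields 1 + 1 + 1 = 3 seconds.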
2577 /**
2578 * A function to get a combined list of parts and segments with durations
2579 * and indexes.
2580 *
2581 * @param {Playlist} playlist the playlist to get the list for.
2582 *
2583 * @return {Array} The part/segment list.
2584 */
2585
2586 var getPartsAndSegments = function getPartsAndSegments(playlist) {
2587 return (playlist.segments || []).reduce(function (acc, segment, si) {
2588 if (segment.parts) {
2589 segment.parts.forEach(function (part, pi) {
2590 acc.push({
2591 duration: part.duration,
2592 segmentIndex: si,
2593 partIndex: pi,
2594 part: part,
2595 segment: segment
2596 });
2597 });
2598 } else {
2599 acc.push({
2600 duration: segment.duration,
2601 segmentIndex: si,
2602 partIndex: null,
2603 segment: segment,
2604 part: null
2605 });
2606 }
2607
2608 return acc;
2609 }, []);
2610 };
2611 var getLastParts = function getLastParts(media) {
2612 var lastSegment = media.segments && media.segments.length && media.segments[media.segments.length - 1];
2613 return lastSegment && lastSegment.parts || [];
2614 };
2615 var getKnownPartCount = function getKnownPartCount(_ref) {
2616 var preloadSegment = _ref.preloadSegment;
2617
2618 if (!preloadSegment) {
2619 return;
2620 }
2621
2622 var parts = preloadSegment.parts,
2623 preloadHints = preloadSegment.preloadHints;
2624 var partCount = (preloadHints || []).reduce(function (count, hint) {
2625 return count + (hint.type === 'PART' ? 1 : 0);
2626 }, 0);
2627 partCount += parts && parts.length ? parts.length : 0;
2628 return partCount;
2629 };
2630 /**
2631 * Get the number of seconds to delay from the end of a
2632 * live playlist.
2633 *
2634 * @param {Playlist} master the master playlist
2635 * @param {Playlist} media the media playlist
2636 * @return {number} the hold back in seconds.
2637 */
2638
2639 var liveEdgeDelay = function liveEdgeDelay(master, media) {
2640 if (media.endList) {
2641 return 0;
2642 } // dash suggestedPresentationDelay trumps everything
2643
2644
2645 if (master && master.suggestedPresentationDelay) {
2646 return master.suggestedPresentationDelay;
2647 }
2648
2649 var hasParts = getLastParts(media).length > 0; // look for "part" delays from ll-hls first
2650
2651 if (hasParts && media.serverControl && media.serverControl.partHoldBack) {
2652 return media.serverControl.partHoldBack;
2653 } else if (hasParts && media.partTargetDuration) {
2654 return media.partTargetDuration * 3; // finally look for full segment delays
2655 } else if (media.serverControl && media.serverControl.holdBack) {
2656 return media.serverControl.holdBack;
2657 } else if (media.targetDuration) {
2658 return media.targetDuration * 3;
2659 }
2660
2661 return 0;
2662 };
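  // Worked examples (hypothetical playlists): an LL-HLS playlist with
  // parts, no serverControl, and partTargetDuration 0.33 delays
  // 3 * 0.33 ~= 1 second from the live edge; a regular live playlist
  // with targetDuration 6 delays 3 * 6 = 18 seconds.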
2663 /**
2664 * walk backward until we find a duration we can use
2665 * or return a failure
2666 *
2667 * @param {Playlist} playlist the playlist to walk through
2668 * @param {Number} endSequence the mediaSequence to stop walking on
2669 */
2670
2671 var backwardDuration = function backwardDuration(playlist, endSequence) {
2672 var result = 0;
2673 var i = endSequence - playlist.mediaSequence; // if a start time is available for the segment immediately following
2674 // the interval, use it
2675
2676 var segment = playlist.segments[i]; // Walk backward until we find the latest segment with timeline
2677 // information that is earlier than endSequence
2678
2679 if (segment) {
2680 if (typeof segment.start !== 'undefined') {
2681 return {
2682 result: segment.start,
2683 precise: true
2684 };
2685 }
2686
2687 if (typeof segment.end !== 'undefined') {
2688 return {
2689 result: segment.end - segment.duration,
2690 precise: true
2691 };
2692 }
2693 }
2694
2695 while (i--) {
2696 segment = playlist.segments[i];
2697
2698 if (typeof segment.end !== 'undefined') {
2699 return {
2700 result: result + segment.end,
2701 precise: true
2702 };
2703 }
2704
2705 result += segmentDurationWithParts(playlist, segment);
2706
2707 if (typeof segment.start !== 'undefined') {
2708 return {
2709 result: result + segment.start,
2710 precise: true
2711 };
2712 }
2713 }
2714
2715 return {
2716 result: result,
2717 precise: false
2718 };
2719 };
2720 /**
2721 * walk forward until we find a duration we can use
2722 * or return a failure
2723 *
2724 * @param {Playlist} playlist the playlist to walk through
2725 * @param {number} endSequence the mediaSequence to stop walking on
2726 */
2727
2728
2729 var forwardDuration = function forwardDuration(playlist, endSequence) {
2730 var result = 0;
2731 var segment;
2732 var i = endSequence - playlist.mediaSequence; // Walk forward until we find the earliest segment with timeline
2733 // information
2734
2735 for (; i < playlist.segments.length; i++) {
2736 segment = playlist.segments[i];
2737
2738 if (typeof segment.start !== 'undefined') {
2739 return {
2740 result: segment.start - result,
2741 precise: true
2742 };
2743 }
2744
2745 result += segmentDurationWithParts(playlist, segment);
2746
2747 if (typeof segment.end !== 'undefined') {
2748 return {
2749 result: segment.end - result,
2750 precise: true
2751 };
2752 }
2753 } // indicate we didn't find a useful duration estimate
2754
2755
2756 return {
2757 result: -1,
2758 precise: false
2759 };
2760 };
2761 /**
2762 * Calculate the media duration from the segments associated with a
2763 * playlist. The duration of a subinterval of the available segments
2764 * may be calculated by specifying an end index.
2765 *
2766 * @param {Object} playlist a media playlist object
2767 * @param {number=} endSequence an exclusive upper boundary
2768 * for the playlist. Defaults to the playlist media sequence plus its length.
2769 * @param {number} expired the amount of time that has dropped
2770 * off the front of the playlist in a live scenario
2771 * @return {number} the duration between the first available segment
2772 * and end index.
2773 */
2774
2775
2776 var intervalDuration = function intervalDuration(playlist, endSequence, expired) {
2777 if (typeof endSequence === 'undefined') {
2778 endSequence = playlist.mediaSequence + playlist.segments.length;
2779 }
2780
2781 if (endSequence < playlist.mediaSequence) {
2782 return 0;
2783 } // do a backward walk to estimate the duration
2784
2785
2786 var backward = backwardDuration(playlist, endSequence);
2787
2788 if (backward.precise) {
2789 // if we were able to base our duration estimate on timing
2790 // information provided directly from the Media Source, return
2791 // it
2792 return backward.result;
2793 } // walk forward to see if a precise duration estimate can be made
2794 // that way
2795
2796
2797 var forward = forwardDuration(playlist, endSequence);
2798
2799 if (forward.precise) {
2800 // we found a segment that has been buffered, so its
2801 // position is known precisely
2802 return forward.result;
2803 } // return the less-precise, playlist-based duration estimate
2804
2805
2806 return backward.result + expired;
2807 };
2808 /**
2809 * Calculates the duration of a playlist. If a start and end index
2810 * are specified, the duration will be for the subset of the media
2811 * timeline between those two indices. The total duration for live
2812 * playlists is always Infinity.
2813 *
2814 * @param {Object} playlist a media playlist object
2815 * @param {number=} endSequence an exclusive upper
2816 * boundary for the playlist. Defaults to the playlist media
2817 * sequence number plus its length.
2818 * @param {number=} expired the amount of time that has
2819 * dropped off the front of the playlist in a live scenario
2820 * @return {number} the duration between the start index and end
2821 * index.
2822 */
2823
2824
2825 var duration = function duration(playlist, endSequence, expired) {
2826 if (!playlist) {
2827 return 0;
2828 }
2829
2830 if (typeof expired !== 'number') {
2831 expired = 0;
2832 } // if a slice of the total duration is not requested, use
2833 // playlist-level duration indicators when they're present
2834
2835
2836 if (typeof endSequence === 'undefined') {
2837 // if present, use the duration specified in the playlist
2838 if (playlist.totalDuration) {
2839 return playlist.totalDuration;
2840 } // duration should be Infinity for live playlists
2841
2842
2843 if (!playlist.endList) {
2844 return window.Infinity;
2845 }
2846 } // calculate the total duration based on the segment durations
2847
2848
2849 return intervalDuration(playlist, endSequence, expired);
2850 };
2851 /**
2852 * Calculate the time between two indexes in the current playlist.
2853 * Neither the start nor the end index needs to be within the current
2854 * playlist, in which case the defaultDuration is used to approximate
2855 * the durations of the segments outside it
2856 *
2857 * @param {Array} options.durationList list to iterate over for durations.
2858 * @param {number} options.defaultDuration duration to use for elements before or after the durationList
2859 * @param {number} options.startIndex partsAndSegments index to start
2860 * @param {number} options.endIndex partsAndSegments index to end.
2861 * @return {number} the number of seconds between startIndex and endIndex
2862 */
2863
2864 var sumDurations = function sumDurations(_ref2) {
2865 var defaultDuration = _ref2.defaultDuration,
2866 durationList = _ref2.durationList,
2867 startIndex = _ref2.startIndex,
2868 endIndex = _ref2.endIndex;
2869 var durations = 0;
2870
2871 if (startIndex > endIndex) {
2872 var _ref3 = [endIndex, startIndex];
2873 startIndex = _ref3[0];
2874 endIndex = _ref3[1];
2875 }
2876
2877 if (startIndex < 0) {
2878 for (var i = startIndex; i < Math.min(0, endIndex); i++) {
2879 durations += defaultDuration;
2880 }
2881
2882 startIndex = 0;
2883 }
2884
2885 for (var _i = startIndex; _i < endIndex; _i++) {
2886 durations += durationList[_i].duration;
2887 }
2888
2889 return durations;
2890 };
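  // A worked example (hypothetical values): negative indexes fall back to
  // defaultDuration, so
  //   sumDurations({ defaultDuration: 10, durationList: [{ duration: 4 }], startIndex: -2, endIndex: 1 });
  //   // => 10 + 10 + 4 = 24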
2891 /**
2892 * Calculates the playlist end time
2893 *
2894 * @param {Object} playlist a media playlist object
2895 * @param {number=} expired the amount of time that has
2896 * dropped off the front of the playlist in a live scenario
2897 * @param {boolean} useSafeLiveEnd a boolean value indicating whether or not the
2898 * playlist end calculation should consider the safe live end
2899 * (truncate the playlist end by three segments). This is normally
2900 * used for calculating the end of the playlist's seekable range.
2901 * This takes into account the value of liveEdgePadding.
2902 * Setting liveEdgePadding to 0 is equivalent to setting this to false.
2903 * @param {number} liveEdgePadding a number indicating how far from the end of the playlist we should be in seconds.
2904 * If this is provided, it is used in the safe live end calculation.
2905 * Setting useSafeLiveEnd=false and setting liveEdgePadding=0 are equivalent.
2906 * Corresponds to suggestedPresentationDelay in DASH manifests.
2907 * @return {number} the end time of playlist
2908 * @function playlistEnd
2909 */
2910
2911 var playlistEnd = function playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding) {
2912 if (!playlist || !playlist.segments) {
2913 return null;
2914 }
2915
2916 if (playlist.endList) {
2917 return duration(playlist);
2918 }
2919
2920 if (expired === null) {
2921 return null;
2922 }
2923
2924 expired = expired || 0;
2925 var lastSegmentEndTime = intervalDuration(playlist, playlist.mediaSequence + playlist.segments.length, expired);
2926
2927 if (useSafeLiveEnd) {
2928 liveEdgePadding = typeof liveEdgePadding === 'number' ? liveEdgePadding : liveEdgeDelay(null, playlist);
2929 lastSegmentEndTime -= liveEdgePadding;
2930 } // don't return a time less than zero
2931
2932
2933 return Math.max(0, lastSegmentEndTime);
2934 };
2935 /**
2936 * Calculates the interval of time that is currently seekable in a
2937 * playlist. The returned time ranges are relative to the earliest
2938 * moment in the specified playlist that is still available. A full
2939 * seekable implementation for live streams would need to offset
2940 * these values by the duration of content that has expired from the
2941 * stream.
2942 *
2943 * @param {Object} playlist a media playlist object
2945 * @param {number=} expired the amount of time that has
2946 * dropped off the front of the playlist in a live scenario
2947 * @param {number} liveEdgePadding how far from the end of the playlist we should be in seconds.
2948 * Corresponds to suggestedPresentationDelay in DASH manifests.
2949 * @return {TimeRanges} the periods of time that are valid targets
2950 * for seeking
2951 */
2952
2953 var seekable = function seekable(playlist, expired, liveEdgePadding) {
2954 var useSafeLiveEnd = true;
2955 var seekableStart = expired || 0;
2956 var seekableEnd = playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding);
2957
2958 if (seekableEnd === null) {
2959 return createTimeRange();
2960 }
2961
2962 return createTimeRange(seekableStart, seekableEnd);
2963 };
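  // A worked example (hypothetical live playlist): if 30 seconds have
  // expired and playlistEnd (after the safe live-edge padding) is 90,
  // seekable returns the single range [30, 90].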
2964 /**
2965 * Determine the index and estimated starting time of the segment that
2966 * contains a specified playback position in a media playlist.
2967 *
2968 * @param {Object} options.playlist the media playlist to query
2969 * @param {number} options.currentTime The number of seconds since the earliest
2970 * possible position to determine the containing segment for
2971 * @param {number} options.startTime the time when the segment/part starts
2972 * @param {number} options.startingSegmentIndex the segment index to start looking at.
2973 * @param {number?} [options.startingPartIndex] the part index to look at within the segment.
2974 *
2975 * @return {Object} an object with partIndex, segmentIndex, and startTime.
2976 */
2977
2978 var getMediaInfoForTime = function getMediaInfoForTime(_ref4) {
2979 var playlist = _ref4.playlist,
2980 currentTime = _ref4.currentTime,
2981 startingSegmentIndex = _ref4.startingSegmentIndex,
2982 startingPartIndex = _ref4.startingPartIndex,
2983 startTime = _ref4.startTime,
2984 experimentalExactManifestTimings = _ref4.experimentalExactManifestTimings;
2985 var time = currentTime - startTime;
2986 var partsAndSegments = getPartsAndSegments(playlist);
2987 var startIndex = 0;
2988
2989 for (var i = 0; i < partsAndSegments.length; i++) {
2990 var partAndSegment = partsAndSegments[i];
2991
2992 if (startingSegmentIndex !== partAndSegment.segmentIndex) {
2993 continue;
2994 } // skip this if part index does not match.
2995
2996
2997 if (typeof startingPartIndex === 'number' && typeof partAndSegment.partIndex === 'number' && startingPartIndex !== partAndSegment.partIndex) {
2998 continue;
2999 }
3000
3001 startIndex = i;
3002 break;
3003 }
3004
3005 if (time < 0) {
3006 // Walk backward from startIndex in the playlist, adding durations
3007 // until we find a segment that contains `time` and return it
3008 if (startIndex > 0) {
3009 for (var _i2 = startIndex - 1; _i2 >= 0; _i2--) {
3010 var _partAndSegment = partsAndSegments[_i2];
3011 time += _partAndSegment.duration;
3012
3013 if (experimentalExactManifestTimings) {
3014 if (time < 0) {
3015 continue;
3016 }
3017 } else if (time + TIME_FUDGE_FACTOR <= 0) {
3018 continue;
3019 }
3020
3021 return {
3022 partIndex: _partAndSegment.partIndex,
3023 segmentIndex: _partAndSegment.segmentIndex,
3024 startTime: startTime - sumDurations({
3025 defaultDuration: playlist.targetDuration,
3026 durationList: partsAndSegments,
3027 startIndex: startIndex,
3028 endIndex: _i2
3029 })
3030 };
3031 }
3032 } // We were unable to find a good segment within the playlist
3033 // so select the first segment
3034
3035
3036 return {
3037 partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
3038 segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
3039 startTime: currentTime
3040 };
3041 } // When startIndex is negative, we first walk forward to the first segment
3042 // adding target durations. If we "run out of time" before getting to
3043 // the first segment, return the first segment
3044
3045
3046 if (startIndex < 0) {
3047 for (var _i3 = startIndex; _i3 < 0; _i3++) {
3048 time -= playlist.targetDuration;
3049
3050 if (time < 0) {
3051 return {
3052 partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
3053 segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
3054 startTime: currentTime
3055 };
3056 }
3057 }
3058
3059 startIndex = 0;
3060 } // Walk forward from startIndex in the playlist, subtracting durations
3061 // until we find a segment that contains `time` and return it
3062
3063
3064 for (var _i4 = startIndex; _i4 < partsAndSegments.length; _i4++) {
3065 var _partAndSegment2 = partsAndSegments[_i4];
3066 time -= _partAndSegment2.duration;
3067
3068 if (experimentalExactManifestTimings) {
3069 if (time > 0) {
3070 continue;
3071 }
3072 } else if (time - TIME_FUDGE_FACTOR >= 0) {
3073 continue;
3074 }
3075
3076 return {
3077 partIndex: _partAndSegment2.partIndex,
3078 segmentIndex: _partAndSegment2.segmentIndex,
3079 startTime: startTime + sumDurations({
3080 defaultDuration: playlist.targetDuration,
3081 durationList: partsAndSegments,
3082 startIndex: startIndex,
3083 endIndex: _i4
3084 })
3085 };
3086 } // We are out of possible candidates so load the last one...
3087
3088
3089 return {
3090 segmentIndex: partsAndSegments[partsAndSegments.length - 1].segmentIndex,
3091 partIndex: partsAndSegments[partsAndSegments.length - 1].partIndex,
3092 startTime: currentTime
3093 };
3094 };
3095 /**
3096 * Check whether the playlist is blacklisted or not.
3097 *
3098 * @param {Object} playlist the media playlist object
3099 * @return {boolean} whether the playlist is blacklisted or not
3100 * @function isBlacklisted
3101 */
3102
3103 var isBlacklisted = function isBlacklisted(playlist) {
3104 return playlist.excludeUntil && playlist.excludeUntil > Date.now();
3105 };
3106 /**
3107 * Check whether the playlist is compatible with current playback configuration or has
3108 * been blacklisted permanently for being incompatible.
3109 *
3110 * @param {Object} playlist the media playlist object
3111 * @return {boolean} whether the playlist is incompatible or not
3112 * @function isIncompatible
3113 */
3114
3115 var isIncompatible = function isIncompatible(playlist) {
3116 return playlist.excludeUntil && playlist.excludeUntil === Infinity;
3117 };
3118 /**
3119 * Check whether the playlist is enabled or not.
3120 *
3121 * @param {Object} playlist the media playlist object
3122 * @return {boolean} whether the playlist is enabled or not
3123 * @function isEnabled
3124 */
3125
3126 var isEnabled = function isEnabled(playlist) {
3127 var blacklisted = isBlacklisted(playlist);
3128 return !playlist.disabled && !blacklisted;
3129 };
3130 /**
3131 * Check whether the playlist has been manually disabled through the representations api.
3132 *
3133 * @param {Object} playlist the media playlist object
3134 * @return {boolean} whether the playlist is disabled manually or not
3135 * @function isDisabled
3136 */
3137
3138 var isDisabled = function isDisabled(playlist) {
3139 return playlist.disabled;
3140 };
3141 /**
3142 * Returns whether the current playlist is an AES encrypted HLS stream
3143 *
3144 * @return {boolean} true if it's an AES encrypted HLS stream
3145 */
3146
3147 var isAes = function isAes(media) {
3148 for (var i = 0; i < media.segments.length; i++) {
3149 if (media.segments[i].key) {
3150 return true;
3151 }
3152 }
3153
3154 return false;
3155 };
3156 /**
3157 * Checks if the playlist has a value for the specified attribute
3158 *
3159 * @param {string} attr
3160 * Attribute to check for
3161 * @param {Object} playlist
3162 * The media playlist object
3163 * @return {boolean}
3164 * Whether the playlist contains a value for the attribute or not
3165 * @function hasAttribute
3166 */
3167
3168 var hasAttribute = function hasAttribute(attr, playlist) {
3169 return playlist.attributes && playlist.attributes[attr];
3170 };
3171 /**
3172 * Estimates the time required to complete a segment download from the specified playlist
3173 *
3174 * @param {number} segmentDuration
3175 * Duration of requested segment
3176 * @param {number} bandwidth
3177 * Current measured bandwidth of the player
3178 * @param {Object} playlist
3179 * The media playlist object
3180 * @param {number=} bytesReceived
3181 * Number of bytes already received for the request. Defaults to 0
3182 * @return {number|NaN}
3183 * The estimated time to request the segment. NaN if bandwidth information for
3184 * the given playlist is unavailable
3185 * @function estimateSegmentRequestTime
3186 */
3187
3188 var estimateSegmentRequestTime = function estimateSegmentRequestTime(segmentDuration, bandwidth, playlist, bytesReceived) {
3189 if (bytesReceived === void 0) {
3190 bytesReceived = 0;
3191 }
3192
3193 if (!hasAttribute('BANDWIDTH', playlist)) {
3194 return NaN;
3195 }
3196
3197 var size = segmentDuration * playlist.attributes.BANDWIDTH;
3198 return (size - bytesReceived * 8) / bandwidth;
3199 };
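  // A worked example (hypothetical values): a 6-second segment from a
  // playlist with BANDWIDTH=1000000 is about 6000000 bits; at a measured
  // bandwidth of 2000000 bits/sec with no bytes received yet:
  //   estimateSegmentRequestTime(6, 2000000, playlist); // => 3 seconds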
3200 /*
3201 * Returns whether the current playlist is the lowest enabled rendition
3202 *
3203 * @return {Boolean} true if on the lowest enabled rendition
3204 */
3205
3206 var isLowestEnabledRendition = function isLowestEnabledRendition(master, media) {
3207 if (master.playlists.length === 1) {
3208 return true;
3209 }
3210
3211 var currentBandwidth = media.attributes.BANDWIDTH || Number.MAX_VALUE;
3212 return master.playlists.filter(function (playlist) {
3213 if (!isEnabled(playlist)) {
3214 return false;
3215 }
3216
3217 return (playlist.attributes.BANDWIDTH || 0) < currentBandwidth;
3218 }).length === 0;
3219 };
3220 var playlistMatch = function playlistMatch(a, b) {
3221 // both playlists are null
3222 // or only one playlist is non-null
3223 // no match
3224 if (!a && !b || !a && b || a && !b) {
3225 return false;
3226 } // playlist objects are the same, match
3227
3228
3229 if (a === b) {
3230 return true;
3231 } // first try to use id as it should be the most
3232 // accurate
3233
3234
3235 if (a.id && b.id && a.id === b.id) {
3236 return true;
3237 // next try to use resolvedUri as it should be the
3238 // second most accurate.
3239
3240
3241 if (a.resolvedUri && b.resolvedUri && a.resolvedUri === b.resolvedUri) {
3242 return true;
3243 } // finally try to use uri as it should be accurate
3244 // but might miss a few cases for relative uris
3245
3246
3247 if (a.uri && b.uri && a.uri === b.uri) {
3248 return true;
3249 }
3250
3251 return false;
3252 };
3253
3254 var someAudioVariant = function someAudioVariant(master, callback) {
3255 var AUDIO = master && master.mediaGroups && master.mediaGroups.AUDIO || {};
3256 var found = false;
3257
3258 for (var groupName in AUDIO) {
3259 for (var label in AUDIO[groupName]) {
3260 found = callback(AUDIO[groupName][label]);
3261
3262 if (found) {
3263 break;
3264 }
3265 }
3266
3267 if (found) {
3268 break;
3269 }
3270 }
3271
3272 return !!found;
3273 };
3274
3275 var isAudioOnly = function isAudioOnly(master) {
3276 // we are audio only if we have no main playlists but do
3277 // have media group playlists.
3278 if (!master || !master.playlists || !master.playlists.length) {
3279 // without audio variants or playlists this
3280 // is not an audio only master.
3281 var found = someAudioVariant(master, function (variant) {
3282 return variant.playlists && variant.playlists.length || variant.uri;
3283 });
3284 return found;
3285 } // if every playlist has only an audio codec it is audio only
3286
3287
3288 var _loop = function _loop(i) {
3289 var playlist = master.playlists[i];
3290 var CODECS = playlist.attributes && playlist.attributes.CODECS; // all codecs are audio, this is an audio playlist.
3291
3292 if (CODECS && CODECS.split(',').every(function (c) {
3293 return isAudioCodec(c);
3294 })) {
3295 return "continue";
3296 } // if the playlist is in an audio group it is audio only
3297
3298
3299 var found = someAudioVariant(master, function (variant) {
3300 return playlistMatch(playlist, variant);
3301 });
3302
3303 if (found) {
3304 return "continue";
3305 } // if we make it here this playlist isn't audio and we
3306 // are not audio only
3307
3308
3309 return {
3310 v: false
3311 };
3312 };
3313
3314 for (var i = 0; i < master.playlists.length; i++) {
3315 var _ret = _loop(i);
3316
3317 if (_ret === "continue") continue;
3318 if (typeof _ret === "object") return _ret.v;
3319 } // if we make it past every playlist without returning, then
3320 // this is an audio only playlist.
3321
3322
3323 return true;
3324 }; // exports
3325
3326 var Playlist = {
3327 liveEdgeDelay: liveEdgeDelay,
3328 duration: duration,
3329 seekable: seekable,
3330 getMediaInfoForTime: getMediaInfoForTime,
3331 isEnabled: isEnabled,
3332 isDisabled: isDisabled,
3333 isBlacklisted: isBlacklisted,
3334 isIncompatible: isIncompatible,
3335 playlistEnd: playlistEnd,
3336 isAes: isAes,
3337 hasAttribute: hasAttribute,
3338 estimateSegmentRequestTime: estimateSegmentRequestTime,
3339 isLowestEnabledRendition: isLowestEnabledRendition,
3340 isAudioOnly: isAudioOnly,
3341 playlistMatch: playlistMatch,
3342 segmentDurationWithParts: segmentDurationWithParts
3343 };
3344
3345 var log = videojs__default["default"].log;
3346 var createPlaylistID = function createPlaylistID(index, uri) {
3347 return index + "-" + uri;
3348 }; // default function for creating a group id
3349
3350 var groupID = function groupID(type, group, label) {
3351 return "placeholder-uri-" + type + "-" + group + "-" + label;
3352 };
3353 /**
3354 * Parses a given m3u8 playlist
3355 *
3356 * @param {Function} [onwarn]
3357 * a function to call when the parser triggers a warning event.
3358 * @param {Function} [oninfo]
3359 * a function to call when the parser triggers an info event.
3360 * @param {string} manifestString
3361 * The downloaded manifest string
3362 * @param {Object[]} [customTagParsers]
3363 * An array of custom tag parsers for the m3u8-parser instance
3364 * @param {Object[]} [customTagMappers]
3365 * An array of custom tag mappers for the m3u8-parser instance
3366 * @param {boolean} [experimentalLLHLS=false]
3367 * Whether to keep ll-hls features in the manifest after parsing.
3368 * @return {Object}
3369 * The manifest object
3370 */
3371
3372
3373 var parseManifest = function parseManifest(_ref) {
3374 var onwarn = _ref.onwarn,
3375 oninfo = _ref.oninfo,
3376 manifestString = _ref.manifestString,
3377 _ref$customTagParsers = _ref.customTagParsers,
3378 customTagParsers = _ref$customTagParsers === void 0 ? [] : _ref$customTagParsers,
3379 _ref$customTagMappers = _ref.customTagMappers,
3380 customTagMappers = _ref$customTagMappers === void 0 ? [] : _ref$customTagMappers,
3381 experimentalLLHLS = _ref.experimentalLLHLS;
3382 var parser = new Parser();
3383
3384 if (onwarn) {
3385 parser.on('warn', onwarn);
3386 }
3387
3388 if (oninfo) {
3389 parser.on('info', oninfo);
3390 }
3391
3392 customTagParsers.forEach(function (customParser) {
3393 return parser.addParser(customParser);
3394 });
3395 customTagMappers.forEach(function (mapper) {
3396 return parser.addTagMapper(mapper);
3397 });
3398 parser.push(manifestString);
3399 parser.end();
3400 var manifest = parser.manifest; // remove llhls features from the parsed manifest
3401 // if we don't want llhls support.
3402
3403 if (!experimentalLLHLS) {
3404 ['preloadSegment', 'skip', 'serverControl', 'renditionReports', 'partInf', 'partTargetDuration'].forEach(function (k) {
3405 if (manifest.hasOwnProperty(k)) {
3406 delete manifest[k];
3407 }
3408 });
3409
3410 if (manifest.segments) {
3411 manifest.segments.forEach(function (segment) {
3412 ['parts', 'preloadHints'].forEach(function (k) {
3413 if (segment.hasOwnProperty(k)) {
3414 delete segment[k];
3415 }
3416 });
3417 });
3418 }
3419 }
3420
3421 if (!manifest.targetDuration) {
3422 var targetDuration = 10;
3423
3424 if (manifest.segments && manifest.segments.length) {
3425 targetDuration = manifest.segments.reduce(function (acc, s) {
3426 return Math.max(acc, s.duration);
3427 }, 0);
3428 }
3429
3430 if (onwarn) {
3431 onwarn("manifest has no targetDuration defaulting to " + targetDuration);
3432 }
3433
3434 manifest.targetDuration = targetDuration;
3435 }
3436
3437 var parts = getLastParts(manifest);
3438
3439 if (parts.length && !manifest.partTargetDuration) {
3440 var partTargetDuration = parts.reduce(function (acc, p) {
3441 return Math.max(acc, p.duration);
3442 }, 0);
3443
3444 if (onwarn) {
3445 onwarn("manifest has no partTargetDuration defaulting to " + partTargetDuration);
3446 log.error('LL-HLS manifest has parts but lacks required #EXT-X-PART-INF:PART-TARGET value. See https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-09#section-4.4.3.7. Playback is not guaranteed.');
3447 }
3448
3449 manifest.partTargetDuration = partTargetDuration;
3450 }
3451
3452 return manifest;
3453 };
3454 /**
3455 * Loops through all supported media groups in master and calls the provided
3456 * callback for each group
3457 *
3458 * @param {Object} master
3459 * The parsed master manifest object
3460 * @param {Function} callback
3461 * Callback to call for each media group
3462 */
3463
3464 var forEachMediaGroup$1 = function forEachMediaGroup(master, callback) {
3465 if (!master.mediaGroups) {
3466 return;
3467 }
3468
3469 ['AUDIO', 'SUBTITLES'].forEach(function (mediaType) {
3470 if (!master.mediaGroups[mediaType]) {
3471 return;
3472 }
3473
3474 for (var groupKey in master.mediaGroups[mediaType]) {
3475 for (var labelKey in master.mediaGroups[mediaType][groupKey]) {
3476 var mediaProperties = master.mediaGroups[mediaType][groupKey][labelKey];
3477 callback(mediaProperties, mediaType, groupKey, labelKey);
3478 }
3479 }
3480 });
3481 };
3482 /**
3483 * Adds properties and attributes to the playlist to keep consistent functionality for
3484 * playlists throughout VHS.
3485 *
3486 * @param {Object} config
3487 * Arguments object
3488 * @param {Object} config.playlist
3489 * The media playlist
3490 * @param {string} [config.uri]
3491 * The uri to the media playlist (if media playlist is not from within a master
3492 * playlist)
3493 * @param {string} id
3494 * ID to use for the playlist
3495 */
3496
3497 var setupMediaPlaylist = function setupMediaPlaylist(_ref2) {
3498 var playlist = _ref2.playlist,
3499 uri = _ref2.uri,
3500 id = _ref2.id;
3501 playlist.id = id;
3502 playlist.playlistErrors_ = 0;
3503
3504 if (uri) {
3505 // For media playlists, m3u8-parser does not have access to a URI, as HLS media
3506 // playlists do not contain their own source URI, but one is needed for consistency in
3507 // VHS.
3508 playlist.uri = uri;
3509 } // For HLS master playlists, even though certain attributes MUST be defined, the
3510 // stream may still be played without them.
3511 // For HLS media playlists, m3u8-parser does not attach an attributes object to the
3512 // manifest.
3513 //
3514 // To avoid undefined reference errors through the project, and make the code easier
3515 // to write/read, add an empty attributes object for these cases.
3516
3517
3518 playlist.attributes = playlist.attributes || {};
3519 };
3520 /**
3521 * Adds ID, resolvedUri, and attributes properties to each playlist of the master, where
3522 * necessary. In addition, creates playlist IDs for each playlist and adds playlist ID to
3523 * playlist references to the playlists array.
3524 *
3525 * @param {Object} master
3526 * The master playlist
3527 */
3528
3529 var setupMediaPlaylists = function setupMediaPlaylists(master) {
3530 var i = master.playlists.length;
3531
3532 while (i--) {
3533 var playlist = master.playlists[i];
3534 setupMediaPlaylist({
3535 playlist: playlist,
3536 id: createPlaylistID(i, playlist.uri)
3537 });
3538 playlist.resolvedUri = resolveUrl(master.uri, playlist.uri);
3539 master.playlists[playlist.id] = playlist; // URI reference added for backwards compatibility
3540
3541 master.playlists[playlist.uri] = playlist; // Although the spec states an #EXT-X-STREAM-INF tag MUST have a BANDWIDTH attribute,
3542 // the stream can be played without it. Although an attributes property may have been
3543 // added to the playlist to prevent undefined references, issue a warning to fix the
3544 // manifest.
3545
3546 if (!playlist.attributes.BANDWIDTH) {
3547 log.warn('Invalid playlist STREAM-INF detected. Missing BANDWIDTH attribute.');
3548 }
3549 }
3550 };
3551 /**
3552 * Adds resolvedUri properties to each media group.
3553 *
3554 * @param {Object} master
3555 * The master playlist
3556 */
3557
3558 var resolveMediaGroupUris = function resolveMediaGroupUris(master) {
3559 forEachMediaGroup$1(master, function (properties) {
3560 if (properties.uri) {
3561 properties.resolvedUri = resolveUrl(master.uri, properties.uri);
3562 }
3563 });
3564 };
3565 /**
3566 * Creates a master playlist wrapper to insert a sole media playlist into.
3567 *
3568 * @param {Object} media
3569 * Media playlist
3570 * @param {string} uri
3571 * The media URI
3572 *
3573 * @return {Object}
3574 * Master playlist
3575 */
3576
3577 var masterForMedia = function masterForMedia(media, uri) {
3578 var id = createPlaylistID(0, uri);
3579 var master = {
3580 mediaGroups: {
3581 'AUDIO': {},
3582 'VIDEO': {},
3583 'CLOSED-CAPTIONS': {},
3584 'SUBTITLES': {}
3585 },
3586 uri: window.location.href,
3587 resolvedUri: window.location.href,
3588 playlists: [{
3589 uri: uri,
3590 id: id,
3591 resolvedUri: uri,
3592 // m3u8-parser does not attach an attributes property to media playlists so make
3593 // sure that the property is attached to avoid undefined reference errors
3594 attributes: {}
3595 }]
3596 }; // set up ID reference
3597
3598 master.playlists[id] = master.playlists[0]; // URI reference added for backwards compatibility
3599
3600 master.playlists[uri] = master.playlists[0];
3601 return master;
3602 };
3603 /**
3604 * Does an in-place update of the master manifest to add updated playlist URI references
3605 * as well as other properties needed by VHS that aren't included by the parser.
3606 *
3607 * @param {Object} master
3608 * Master manifest object
3609 * @param {string} uri
3610 * The source URI
3611 * @param {function} createGroupID
3612 * A function to determine how to create the groupID for mediaGroups
3613 */
3614
3615 var addPropertiesToMaster = function addPropertiesToMaster(master, uri, createGroupID) {
3616 if (createGroupID === void 0) {
3617 createGroupID = groupID;
3618 }
3619
3620 master.uri = uri;
3621
3622 for (var i = 0; i < master.playlists.length; i++) {
3623 if (!master.playlists[i].uri) {
3624 // Set up phony URIs for the playlists since playlists are referenced by their URIs
3625 // throughout VHS, but some formats (e.g., DASH) don't have external URIs
3626 // TODO: consider adding dummy URIs in mpd-parser
3627 var phonyUri = "placeholder-uri-" + i;
3628 master.playlists[i].uri = phonyUri;
3629 }
3630 }
3631
3632 var audioOnlyMaster = isAudioOnly(master);
3633 forEachMediaGroup$1(master, function (properties, mediaType, groupKey, labelKey) {
3634 // add a playlist array under properties
3635 if (!properties.playlists || !properties.playlists.length) {
3636 // If the manifest is audio only and this media group does not have a uri, check
3637 // if the media group is located in the main list of playlists. If it is, don't add
3638 // placeholder properties as it shouldn't be considered an alternate audio track.
3639 if (audioOnlyMaster && mediaType === 'AUDIO' && !properties.uri) {
3640 for (var _i = 0; _i < master.playlists.length; _i++) {
3641 var p = master.playlists[_i];
3642
3643 if (p.attributes && p.attributes.AUDIO && p.attributes.AUDIO === groupKey) {
3644 return;
3645 }
3646 }
3647 }
3648
3649 properties.playlists = [_extends_1({}, properties)];
3650 }
3651
3652 properties.playlists.forEach(function (p, i) {
3653 var groupId = createGroupID(mediaType, groupKey, labelKey, p);
3654 var id = createPlaylistID(i, groupId);
3655
3656 if (p.uri) {
3657 p.resolvedUri = p.resolvedUri || resolveUrl(master.uri, p.uri);
3658 } else {
3659 // DEPRECATED: this has been added to prevent a breaking change.
3660 // Previously we only ever had a single media group playlist, so we
3661 // mark the first playlist uri without prepending the index, as we used to.
3662 // Ideally we would handle all of the playlists the same way.
3663 p.uri = i === 0 ? groupId : id; // don't resolve a placeholder uri to an absolute url, just use
3664 // the placeholder again
3665
3666 p.resolvedUri = p.uri;
3667 }
3668
3669 p.id = p.id || id; // add an empty attributes object, all playlists are
3670 // expected to have this.
3671
3672 p.attributes = p.attributes || {}; // setup ID and URI references (URI for backwards compatibility)
3673
3674 master.playlists[p.id] = p;
3675 master.playlists[p.uri] = p;
3676 });
3677 });
3678 setupMediaPlaylists(master);
3679 resolveMediaGroupUris(master);
3680 };
3681
3682 var mergeOptions$2 = videojs__default["default"].mergeOptions,
3683 EventTarget$1 = videojs__default["default"].EventTarget;
3684
3685 var addLLHLSQueryDirectives = function addLLHLSQueryDirectives(uri, media) {
3686 if (media.endList || !media.serverControl) {
3687 return uri;
3688 }
3689
3690 var parameters = {};
3691
3692 if (media.serverControl.canBlockReload) {
3693 var preloadSegment = media.preloadSegment; // next msn is a zero based value, length is not.
3694
3695 var nextMSN = media.mediaSequence + media.segments.length; // If preload segment has parts then it is likely
3696 // that we are going to request a part of that preload segment.
3697 // the logic below is used to determine that.
3698
3699 if (preloadSegment) {
3700 var parts = preloadSegment.parts || []; // _HLS_part is a zero based index
3701
3702 var nextPart = getKnownPartCount(media) - 1; // if nextPart is > -1 and not equal to just the
3703 // length of parts, then we know we had part preload hints
3704 // and we need to add the _HLS_part= query
3705
3706 if (nextPart > -1 && nextPart !== parts.length - 1) {
3707 // add existing parts to our preload hints
3708 // eslint-disable-next-line
3709 parameters._HLS_part = nextPart;
3710 } // this if statement makes sure that we request the msn
3711 // of the preload segment if:
3712 // 1. the preload segment had parts (and was not yet a full segment)
3713 // but was added to our segments array
3714 // 2. the preload segment had preload hints for parts that are not in
3715 // the manifest yet.
3716 // in all other cases we want the segment after the preload segment
3717 // which will be given by using media.segments.length because it is 1 based
3718 // rather than 0 based.
3719
3720
3721 if (nextPart > -1 || parts.length) {
3722 nextMSN--;
3723 }
3724 } // add _HLS_msn= in front of any _HLS_part query
3725 // eslint-disable-next-line
3726
3727
3728 parameters._HLS_msn = nextMSN;
3729 }
3730
3731 if (media.serverControl && media.serverControl.canSkipUntil) {
3732 // add _HLS_skip= in front of all other queries.
3733 // eslint-disable-next-line
3734 parameters._HLS_skip = media.serverControl.canSkipDateranges ? 'v2' : 'YES';
3735 }
3736
3737 if (Object.keys(parameters).length) {
3738 var parsedUri = new window.URL(uri);
3739 ['_HLS_skip', '_HLS_msn', '_HLS_part'].forEach(function (name) {
3740 if (!parameters.hasOwnProperty(name)) {
3741 return;
3742 }
3743
3744 parsedUri.searchParams.set(name, parameters[name]);
3745 });
3746 uri = parsedUri.toString();
3747 }
3748
3749 return uri;
3750 };
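  // Usage sketch (illustrative, not part of the library source): for a
  // low-latency live rendition mid-segment, the refresh URI might be
  // rewritten along these lines, with the exact values depending on
  // getKnownPartCount and the parsed serverControl/preloadSegment state:
  //
  //   addLLHLSQueryDirectives('https://example.com/media.m3u8', media);
  //   // => 'https://example.com/media.m3u8?_HLS_msn=101&_HLS_part=1'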
3751 /**
3752 * Returns a new segment object with properties and
3753 * the parts array merged.
3754 *
3755 * @param {Object} a the old segment
3756 * @param {Object} b the new segment
3757 *
3758 * @return {Object} the merged segment
3759 */
3760
3761
3762 var updateSegment = function updateSegment(a, b) {
3763 if (!a) {
3764 return b;
3765 }
3766
3767 var result = mergeOptions$2(a, b); // if only the old segment has preload hints
3768 // and the new one does not, remove preload hints.
3769
3770 if (a.preloadHints && !b.preloadHints) {
3771 delete result.preloadHints;
3772 } // if only the old segment has parts
3773 // then the parts are no longer valid
3774
3775
3776 if (a.parts && !b.parts) {
3777 delete result.parts; // if both segments have parts
3778 // copy part properties from the old segment
3779 // to the new one.
3780 } else if (a.parts && b.parts) {
3781 for (var i = 0; i < b.parts.length; i++) {
3782 if (a.parts && a.parts[i]) {
3783 result.parts[i] = mergeOptions$2(a.parts[i], b.parts[i]);
3784 }
3785 }
3786 } // set skipped to false for segments that
3787 // have had information merged from the old segment.
3788
3789
3790 if (!a.skipped && b.skipped) {
3791 result.skipped = false;
3792 } // set preload to false for segments that have
3793 // had information added in the new segment.
3794
3795
3796 if (a.preload && !b.preload) {
3797 result.preload = false;
3798 }
3799
3800 return result;
3801 };
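  // Merge sketch (assumed values, not from the library source): properties
  // carried on the old segment, such as resolvedUri, survive the refresh
  // while fields set on the new segment win:
  //
  //   updateSegment(
  //     { uri: 's1.ts', resolvedUri: 'https://example.com/s1.ts', preload: true },
  //     { uri: 's1.ts', duration: 6 }
  //   );
  //   // => { uri: 's1.ts', resolvedUri: 'https://example.com/s1.ts',
  //   //      duration: 6, preload: false }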
3802 /**
3803 * Returns a new array of segments that is the result of merging
3804 * properties from an older list of segments onto an updated
3805 * list. No properties on the updated playlist will be overwritten.
3806 *
3807 * @param {Array} original the outdated list of segments
3808 * @param {Array} update the updated list of segments
3809 * @param {number=} offset the index of the first update
3810 * segment in the original segment list. For non-live playlists,
3811 * this should always be zero and does not need to be
3812 * specified. For live playlists, it should be the difference
3813 * between the media sequence numbers in the original and updated
3814 * playlists.
3815 * @return {Array} a list of merged segment objects
3816 */
3817
3818 var updateSegments = function updateSegments(original, update, offset) {
3819 var oldSegments = original.slice();
3820 var newSegments = update.slice();
3821 offset = offset || 0;
3822 var result = [];
3823 var currentMap;
3824
3825 for (var newIndex = 0; newIndex < newSegments.length; newIndex++) {
3826 var oldSegment = oldSegments[newIndex + offset];
3827 var newSegment = newSegments[newIndex];
3828
3829 if (oldSegment) {
3830 currentMap = oldSegment.map || currentMap;
3831 result.push(updateSegment(oldSegment, newSegment));
3832 } else {
3833 // carry over map to new segment if it is missing
3834 if (currentMap && !newSegment.map) {
3835 newSegment.map = currentMap;
3836 }
3837
3838 result.push(newSegment);
3839 }
3840 }
3841
3842 return result;
3843 };
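  // Illustrative sketch (segment names assumed): on a live refresh where the
  // window slid forward by one media sequence number, offset lines old
  // segments up with their new counterparts:
  //
  //   updateSegments([s0, s1, s2], [s1Updated, s2Updated, s3], 1);
  //
  // s1/s2 are merged with their updated versions via updateSegment; s3 is
  // carried over as-is, inheriting the last known init segment map if it
  // lacks one.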
3844 var resolveSegmentUris = function resolveSegmentUris(segment, baseUri) {
3845 // preloadSegment will not have a uri at all
3846 // as the segment isn't actually in the manifest yet, only parts
3847 if (!segment.resolvedUri && segment.uri) {
3848 segment.resolvedUri = resolveUrl(baseUri, segment.uri);
3849 }
3850
3851 if (segment.key && !segment.key.resolvedUri) {
3852 segment.key.resolvedUri = resolveUrl(baseUri, segment.key.uri);
3853 }
3854
3855 if (segment.map && !segment.map.resolvedUri) {
3856 segment.map.resolvedUri = resolveUrl(baseUri, segment.map.uri);
3857 }
3858
3859 if (segment.map && segment.map.key && !segment.map.key.resolvedUri) {
3860 segment.map.key.resolvedUri = resolveUrl(baseUri, segment.map.key.uri);
3861 }
3862
3863 if (segment.parts && segment.parts.length) {
3864 segment.parts.forEach(function (p) {
3865 if (p.resolvedUri) {
3866 return;
3867 }
3868
3869 p.resolvedUri = resolveUrl(baseUri, p.uri);
3870 });
3871 }
3872
3873 if (segment.preloadHints && segment.preloadHints.length) {
3874 segment.preloadHints.forEach(function (p) {
3875 if (p.resolvedUri) {
3876 return;
3877 }
3878
3879 p.resolvedUri = resolveUrl(baseUri, p.uri);
3880 });
3881 }
3882 };
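  // Sketch with assumed values: relative segment and key URIs are resolved
  // once against the media playlist's URI:
  //
  //   var segment = { uri: 's1.ts', key: { uri: 'k.key' } };
  //   resolveSegmentUris(segment, 'https://example.com/live/media.m3u8');
  //   // segment.resolvedUri     => 'https://example.com/live/s1.ts'
  //   // segment.key.resolvedUri => 'https://example.com/live/k.key'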
3883
3884 var getAllSegments = function getAllSegments(media) {
3885 var segments = media.segments || [];
3886 var preloadSegment = media.preloadSegment; // a preloadSegment with only preloadHints is not currently
3887 // a usable segment, only include a preloadSegment that has
3888 // parts.
3889
3890 if (preloadSegment && preloadSegment.parts && preloadSegment.parts.length) {
3891 // if preloadHints has a MAP that means that the
3892 // init segment is going to change. We cannot use any of the parts
3893 // from this preload segment.
3894 if (preloadSegment.preloadHints) {
3895 for (var i = 0; i < preloadSegment.preloadHints.length; i++) {
3896 if (preloadSegment.preloadHints[i].type === 'MAP') {
3897 return segments;
3898 }
3899 }
3900 } // set the duration for our preload segment to target duration.
3901
3902
3903 preloadSegment.duration = media.targetDuration;
3904 preloadSegment.preload = true;
3905 segments.push(preloadSegment);
3906 }
3907
3908 return segments;
3909 }; // consider the playlist unchanged if the playlist object is the same, or if
3910 // the number of segments is equal, the media sequence number is unchanged,
3911 // the endList flag hasn't changed, and the preload segment is the same
3912
3913
3914 var isPlaylistUnchanged = function isPlaylistUnchanged(a, b) {
3915 return a === b || a.segments && b.segments && a.segments.length === b.segments.length && a.endList === b.endList && a.mediaSequence === b.mediaSequence && a.preloadSegment === b.preloadSegment;
3916 };
3917 /**
3918 * Returns a new master playlist that is the result of merging an
3919 * updated media playlist into the original version. If the
3920 * updated media playlist does not match any of the playlist
3921 * entries in the original master playlist, null is returned.
3922 *
3923 * @param {Object} master a parsed master M3U8 object
3924 * @param {Object} newMedia a parsed media M3U8 object
3925 * @return {Object} a new object that represents the original
3926 * master playlist with the updated media playlist merged in, or
3927 * null if the merge produced no change.
3928 */
3929
3930 var updateMaster$1 = function updateMaster(master, newMedia, unchangedCheck) {
3931 if (unchangedCheck === void 0) {
3932 unchangedCheck = isPlaylistUnchanged;
3933 }
3934
3935 var result = mergeOptions$2(master, {});
3936 var oldMedia = result.playlists[newMedia.id];
3937
3938 if (!oldMedia) {
3939 return null;
3940 }
3941
3942 if (unchangedCheck(oldMedia, newMedia)) {
3943 return null;
3944 }
3945
3946 newMedia.segments = getAllSegments(newMedia);
3947 var mergedPlaylist = mergeOptions$2(oldMedia, newMedia); // always use the new media's preload segment
3948
3949 if (mergedPlaylist.preloadSegment && !newMedia.preloadSegment) {
3950 delete mergedPlaylist.preloadSegment;
3951 } // if the update could overlap existing segment information, merge the two segment lists
3952
3953
3954 if (oldMedia.segments) {
3955 if (newMedia.skip) {
3956 newMedia.segments = newMedia.segments || []; // add back in objects for skipped segments, so that we merge
3957 // old properties into the new segments
3958
3959 for (var i = 0; i < newMedia.skip.skippedSegments; i++) {
3960 newMedia.segments.unshift({
3961 skipped: true
3962 });
3963 }
3964 }
3965
3966 mergedPlaylist.segments = updateSegments(oldMedia.segments, newMedia.segments, newMedia.mediaSequence - oldMedia.mediaSequence);
3967 } // resolve any segment URIs to prevent us from having to do it later
3968
3969
3970 mergedPlaylist.segments.forEach(function (segment) {
3971 resolveSegmentUris(segment, mergedPlaylist.resolvedUri);
3972 }); // TODO Right now in the playlists array there are two references to each playlist, one
3973 // that is referenced by index, and one by URI. The index reference may no longer be
3974 // necessary.
3975
3976 for (var _i = 0; _i < result.playlists.length; _i++) {
3977 if (result.playlists[_i].id === newMedia.id) {
3978 result.playlists[_i] = mergedPlaylist;
3979 }
3980 }
3981
3982 result.playlists[newMedia.id] = mergedPlaylist; // URI reference added for backwards compatibility
3983
3984 result.playlists[newMedia.uri] = mergedPlaylist; // update media group playlist references.
3985
3986 forEachMediaGroup$1(master, function (properties, mediaType, groupKey, labelKey) {
3987 if (!properties.playlists) {
3988 return;
3989 }
3990
3991 for (var _i2 = 0; _i2 < properties.playlists.length; _i2++) {
3992 if (newMedia.id === properties.playlists[_i2].id) {
3993 properties.playlists[_i2] = mergedPlaylist;
3994 }
3995 }
3996 });
3997 return result;
3998 };
3999 /**
4000 * Calculates the time to wait before refreshing a live playlist
4001 *
4002 * @param {Object} media
4003 * The current media
4004 * @param {boolean} update
4005 * True if there were any updates from the last refresh, false otherwise
4006 * @return {number}
4007 * The time in ms to wait before refreshing the live playlist
4008 */
4009
4010 var refreshDelay = function refreshDelay(media, update) {
4011 var segments = media.segments || [];
4012 var lastSegment = segments[segments.length - 1];
4013 var lastPart = lastSegment && lastSegment.parts && lastSegment.parts[lastSegment.parts.length - 1];
4014 var lastDuration = lastPart && lastPart.duration || lastSegment && lastSegment.duration;
4015
4016 if (update && lastDuration) {
4017 return lastDuration * 1000;
4018 } // if the playlist is unchanged since the last reload or last segment duration
4019 // cannot be determined, try again after half the target duration
4020
4021
4022 return (media.partTargetDuration || media.targetDuration || 10) * 500;
4023 };
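  // Worked example (values assumed): if the last part of the last segment is
  // 1 second long and the playlist updated, the next refresh happens in
  // 1000ms; when nothing changed, it retries after half the (part) target
  // duration instead:
  //
  //   refreshDelay(updatedMedia, true);    // => 1000
  //   refreshDelay(unchangedMedia, false); // => 3000 for a 6s target duration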
4024 /**
4025 * Load a playlist from a remote location
4026 *
4027 * @class PlaylistLoader
4028 * @extends Stream
4029 * @param {string|Object} src url or object of manifest
4030 * @param {boolean} withCredentials the withCredentials xhr option
4031 *
4032 */
4033
4034 var PlaylistLoader = /*#__PURE__*/function (_EventTarget) {
4035 inheritsLoose(PlaylistLoader, _EventTarget);
4036
4037 function PlaylistLoader(src, vhs, options) {
4038 var _this;
4039
4040 if (options === void 0) {
4041 options = {};
4042 }
4043
4044 _this = _EventTarget.call(this) || this;
4045
4046 if (!src) {
4047 throw new Error('A non-empty playlist URL or object is required');
4048 }
4049
4050 _this.logger_ = logger('PlaylistLoader');
4051 var _options = options,
4052 _options$withCredenti = _options.withCredentials,
4053 withCredentials = _options$withCredenti === void 0 ? false : _options$withCredenti,
4054 _options$handleManife = _options.handleManifestRedirects,
4055 handleManifestRedirects = _options$handleManife === void 0 ? false : _options$handleManife;
4056 _this.src = src;
4057 _this.vhs_ = vhs;
4058 _this.withCredentials = withCredentials;
4059 _this.handleManifestRedirects = handleManifestRedirects;
4060 var vhsOptions = vhs.options_;
4061 _this.customTagParsers = vhsOptions && vhsOptions.customTagParsers || [];
4062 _this.customTagMappers = vhsOptions && vhsOptions.customTagMappers || [];
4063 _this.experimentalLLHLS = vhsOptions && vhsOptions.experimentalLLHLS || false; // force experimentalLLHLS off for IE 11
4064
4065 if (videojs__default["default"].browser.IE_VERSION) {
4066 _this.experimentalLLHLS = false;
4067 } // initialize the loader state
4068
4069
4070 _this.state = 'HAVE_NOTHING'; // live playlist staleness timeout
4071
4072 _this.handleMediaupdatetimeout_ = _this.handleMediaupdatetimeout_.bind(assertThisInitialized(_this));
4073
4074 _this.on('mediaupdatetimeout', _this.handleMediaupdatetimeout_);
4075
4076 return _this;
4077 }
4078
4079 var _proto = PlaylistLoader.prototype;
4080
4081 _proto.handleMediaupdatetimeout_ = function handleMediaupdatetimeout_() {
4082 var _this2 = this;
4083
4084 if (this.state !== 'HAVE_METADATA') {
4085 // only refresh the media playlist if no other activity is going on
4086 return;
4087 }
4088
4089 var media = this.media();
4090 var uri = resolveUrl(this.master.uri, media.uri);
4091
4092 if (this.experimentalLLHLS) {
4093 uri = addLLHLSQueryDirectives(uri, media);
4094 }
4095
4096 this.state = 'HAVE_CURRENT_METADATA';
4097 this.request = this.vhs_.xhr({
4098 uri: uri,
4099 withCredentials: this.withCredentials
4100 }, function (error, req) {
4101 // disposed
4102 if (!_this2.request) {
4103 return;
4104 }
4105
4106 if (error) {
4107 return _this2.playlistRequestError(_this2.request, _this2.media(), 'HAVE_METADATA');
4108 }
4109
4110 _this2.haveMetadata({
4111 playlistString: _this2.request.responseText,
4112 url: _this2.media().uri,
4113 id: _this2.media().id
4114 });
4115 });
4116 };
4117
4118 _proto.playlistRequestError = function playlistRequestError(xhr, playlist, startingState) {
4119 var uri = playlist.uri,
4120 id = playlist.id; // any in-flight request is now finished
4121
4122 this.request = null;
4123
4124 if (startingState) {
4125 this.state = startingState;
4126 }
4127
4128 this.error = {
4129 playlist: this.master.playlists[id],
4130 status: xhr.status,
4131 message: "HLS playlist request error at URL: " + uri + ".",
4132 responseText: xhr.responseText,
4133 code: xhr.status >= 500 ? 4 : 2
4134 };
4135 this.trigger('error');
4136 };
4137
4138 _proto.parseManifest_ = function parseManifest_(_ref) {
4139 var _this3 = this;
4140
4141 var url = _ref.url,
4142 manifestString = _ref.manifestString;
4143 return parseManifest({
4144 onwarn: function onwarn(_ref2) {
4145 var message = _ref2.message;
4146 return _this3.logger_("m3u8-parser warn for " + url + ": " + message);
4147 },
4148 oninfo: function oninfo(_ref3) {
4149 var message = _ref3.message;
4150 return _this3.logger_("m3u8-parser info for " + url + ": " + message);
4151 },
4152 manifestString: manifestString,
4153 customTagParsers: this.customTagParsers,
4154 customTagMappers: this.customTagMappers,
4155 experimentalLLHLS: this.experimentalLLHLS
4156 });
4157 }
4158 /**
4159 * Update the playlist loader's state in response to a new or updated playlist.
4160 *
4161 * @param {string} [playlistString]
4162 * Playlist string (if playlistObject is not provided)
4163 * @param {Object} [playlistObject]
4164 * Playlist object (if playlistString is not provided)
4165 * @param {string} url
4166 * URL of playlist
4167 * @param {string} id
4168 * ID to use for playlist
4169 */
4170 ;
4171
4172 _proto.haveMetadata = function haveMetadata(_ref4) {
4173 var playlistString = _ref4.playlistString,
4174 playlistObject = _ref4.playlistObject,
4175 url = _ref4.url,
4176 id = _ref4.id;
4177 // any in-flight request is now finished
4178 this.request = null;
4179 this.state = 'HAVE_METADATA';
4180 var playlist = playlistObject || this.parseManifest_({
4181 url: url,
4182 manifestString: playlistString
4183 });
4184 playlist.lastRequest = Date.now();
4185 setupMediaPlaylist({
4186 playlist: playlist,
4187 uri: url,
4188 id: id
4189 }); // merge this playlist into the master
4190
4191 var update = updateMaster$1(this.master, playlist);
4192 this.targetDuration = playlist.partTargetDuration || playlist.targetDuration;
4193 this.pendingMedia_ = null;
4194
4195 if (update) {
4196 this.master = update;
4197 this.media_ = this.master.playlists[id];
4198 } else {
4199 this.trigger('playlistunchanged');
4200 }
4201
4202 this.updateMediaUpdateTimeout_(refreshDelay(this.media(), !!update));
4203 this.trigger('loadedplaylist');
4204 }
4205 /**
4206 * Abort any outstanding work and clean up.
4207 */
4208 ;
4209
4210 _proto.dispose = function dispose() {
4211 this.trigger('dispose');
4212 this.stopRequest();
4213 window.clearTimeout(this.mediaUpdateTimeout);
4214 window.clearTimeout(this.finalRenditionTimeout);
4215 this.off();
4216 };
4217
4218 _proto.stopRequest = function stopRequest() {
4219 if (this.request) {
4220 var oldRequest = this.request;
4221 this.request = null;
4222 oldRequest.onreadystatechange = null;
4223 oldRequest.abort();
4224 }
4225 }
4226 /**
4227 * When called without any arguments, returns the currently
4228 * active media playlist. When called with a single argument,
4229 * triggers the playlist loader to asynchronously switch to the
4230 * specified media playlist. Calling this method while the
4231 * loader is in the HAVE_NOTHING state causes an error to be
4232 * thrown but otherwise has no effect.
4233 *
4234 * @param {Object=} playlist the parsed media playlist
4235 * object to switch to
4236 * @param {boolean=} shouldDelay whether we should delay the request by half target duration
4237 *
4238 * @return {Playlist} the current loaded media
4239 */
4240 ;
4241
4242 _proto.media = function media(playlist, shouldDelay) {
4243 var _this4 = this;
4244
4245 // getter
4246 if (!playlist) {
4247 return this.media_;
4248 } // setter
4249
4250
4251 if (this.state === 'HAVE_NOTHING') {
4252 throw new Error('Cannot switch media playlist from ' + this.state);
4253 } // find the playlist object if the target playlist has been
4254 // specified by URI
4255
4256
4257 if (typeof playlist === 'string') {
4258 if (!this.master.playlists[playlist]) {
4259 throw new Error('Unknown playlist URI: ' + playlist);
4260 }
4261
4262 playlist = this.master.playlists[playlist];
4263 }
4264
4265 window.clearTimeout(this.finalRenditionTimeout);
4266
4267 if (shouldDelay) {
4268 var delay = (playlist.partTargetDuration || playlist.targetDuration) / 2 * 1000 || 5 * 1000;
4269 this.finalRenditionTimeout = window.setTimeout(this.media.bind(this, playlist, false), delay);
4270 return;
4271 }
4272
4273 var startingState = this.state;
4274 var mediaChange = !this.media_ || playlist.id !== this.media_.id;
4275 var masterPlaylistRef = this.master.playlists[playlist.id]; // switch to fully loaded playlists immediately
4276
4277 if (masterPlaylistRef && masterPlaylistRef.endList || // handle the case of a playlist object (e.g., if using vhs-json with a resolved
4278 // media playlist or, for the case of demuxed audio, a resolved audio media group)
4279 playlist.endList && playlist.segments.length) {
4280 // abort outstanding playlist requests
4281 if (this.request) {
4282 this.request.onreadystatechange = null;
4283 this.request.abort();
4284 this.request = null;
4285 }
4286
4287 this.state = 'HAVE_METADATA';
4288 this.media_ = playlist; // trigger media change if the active media has been updated
4289
4290 if (mediaChange) {
4291 this.trigger('mediachanging');
4292
4293 if (startingState === 'HAVE_MASTER') {
4294 // The initial playlist was a master manifest, and the first media selected was
4295 // also provided (in the form of a resolved playlist object) as part of the
4296 // source object (rather than just a URL). Therefore, since the media playlist
4297 // doesn't need to be requested, loadedmetadata won't trigger as part of the
4298 // normal flow, and needs an explicit trigger here.
4299 this.trigger('loadedmetadata');
4300 } else {
4301 this.trigger('mediachange');
4302 }
4303 }
4304
4305 return;
4306 } // We update/set the timeout here so that live playlists
4307 // that are not a media change will "start" the loader as expected.
4308 // We expect that this function will start the media update timeout
4309 // cycle again. This also prevents a playlist switch failure from
4310 // causing us to stall during live.
4311
4312
4313 this.updateMediaUpdateTimeout_(refreshDelay(playlist, true)); // switching to the active playlist is a no-op
4314
4315 if (!mediaChange) {
4316 return;
4317 }
4318
4319 this.state = 'SWITCHING_MEDIA'; // there is already an outstanding playlist request
4320
4321 if (this.request) {
4322 if (playlist.resolvedUri === this.request.url) {
4323 // requesting to switch to the same playlist multiple times
4324 // has no effect after the first
4325 return;
4326 }
4327
4328 this.request.onreadystatechange = null;
4329 this.request.abort();
4330 this.request = null;
4331 } // request the new playlist
4332
4333
4334 if (this.media_) {
4335 this.trigger('mediachanging');
4336 }
4337
4338 this.pendingMedia_ = playlist;
4339 this.request = this.vhs_.xhr({
4340 uri: playlist.resolvedUri,
4341 withCredentials: this.withCredentials
4342 }, function (error, req) {
4343 // disposed
4344 if (!_this4.request) {
4345 return;
4346 }
4347
4348 playlist.lastRequest = Date.now();
4349 playlist.resolvedUri = resolveManifestRedirect(_this4.handleManifestRedirects, playlist.resolvedUri, req);
4350
4351 if (error) {
4352 return _this4.playlistRequestError(_this4.request, playlist, startingState);
4353 }
4354
4355 _this4.haveMetadata({
4356 playlistString: req.responseText,
4357 url: playlist.uri,
4358 id: playlist.id
4359 }); // fire loadedmetadata the first time a media playlist is loaded
4360
4361
4362 if (startingState === 'HAVE_MASTER') {
4363 _this4.trigger('loadedmetadata');
4364 } else {
4365 _this4.trigger('mediachange');
4366 }
4367 });
4368 }
4369 /**
4370 * pause loading of the playlist
4371 */
4372 ;
4373
4374 _proto.pause = function pause() {
4375 if (this.mediaUpdateTimeout) {
4376 window.clearTimeout(this.mediaUpdateTimeout);
4377 this.mediaUpdateTimeout = null;
4378 }
4379
4380 this.stopRequest();
4381
4382 if (this.state === 'HAVE_NOTHING') {
4383 // If we pause the loader before any data has been retrieved, it's as if we never
4384 // started, so reset to an unstarted state.
4385 this.started = false;
4386 } // Need to restore state now that no activity is happening
4387
4388
4389 if (this.state === 'SWITCHING_MEDIA') {
4390 // if the loader was in the process of switching media, it should either return to
4391 // HAVE_MASTER or HAVE_METADATA depending on if the loader has loaded a media
4392 // playlist yet. This is determined by the existence of loader.media_
4393 if (this.media_) {
4394 this.state = 'HAVE_METADATA';
4395 } else {
4396 this.state = 'HAVE_MASTER';
4397 }
4398 } else if (this.state === 'HAVE_CURRENT_METADATA') {
4399 this.state = 'HAVE_METADATA';
4400 }
4401 }
4402 /**
4403 * start loading of the playlist
4404 */
4405 ;
4406
4407 _proto.load = function load(shouldDelay) {
4408 var _this5 = this;
4409
4410 if (this.mediaUpdateTimeout) {
4411 window.clearTimeout(this.mediaUpdateTimeout);
4412 this.mediaUpdateTimeout = null;
4413 }
4414
4415 var media = this.media();
4416
4417 if (shouldDelay) {
4418 var delay = media ? (media.partTargetDuration || media.targetDuration) / 2 * 1000 : 5 * 1000;
4419 this.mediaUpdateTimeout = window.setTimeout(function () {
4420 _this5.mediaUpdateTimeout = null;
4421
4422 _this5.load();
4423 }, delay);
4424 return;
4425 }
4426
4427 if (!this.started) {
4428 this.start();
4429 return;
4430 }
4431
4432 if (media && !media.endList) {
4433 this.trigger('mediaupdatetimeout');
4434 } else {
4435 this.trigger('loadedplaylist');
4436 }
4437 };
4438
4439 _proto.updateMediaUpdateTimeout_ = function updateMediaUpdateTimeout_(delay) {
4440 var _this6 = this;
4441
4442 if (this.mediaUpdateTimeout) {
4443 window.clearTimeout(this.mediaUpdateTimeout);
4444 this.mediaUpdateTimeout = null;
4445 } // we only use mediaupdatetimeout for live playlists.
4446
4447
4448 if (!this.media() || this.media().endList) {
4449 return;
4450 }
4451
4452 this.mediaUpdateTimeout = window.setTimeout(function () {
4453 _this6.mediaUpdateTimeout = null;
4454
4455 _this6.trigger('mediaupdatetimeout');
4456
4457 _this6.updateMediaUpdateTimeout_(delay);
4458 }, delay);
4459 }
4460 /**
4461 * start loading of the playlist
4462 */
4463 ;
4464
4465 _proto.start = function start() {
4466 var _this7 = this;
4467
4468 this.started = true;
4469
4470 if (typeof this.src === 'object') {
4471 // in the case of an entirely constructed manifest object (meaning there's no actual
4472 // manifest on a server), default the uri to the page's href
4473 if (!this.src.uri) {
4474 this.src.uri = window.location.href;
4475 } // resolvedUri is added on internally after the initial request. Since there's no
4476 // request for pre-resolved manifests, add on resolvedUri here.
4477
4478
4479 this.src.resolvedUri = this.src.uri; // Since a manifest object was passed in as the source (instead of a URL), the first
4480 // request can be skipped (since the top level of the manifest, at a minimum, is
4481 // already available as a parsed manifest object). However, if the manifest object
4482 // represents a master playlist, some media playlists may need to be resolved before
4483 // the starting segment list is available. Therefore, go directly to setup of the
4484 // initial playlist, and let the normal flow continue from there.
4485 //
4486 // Note that the call to setup is asynchronous, as other sections of VHS may assume
4487 // that the first request is asynchronous.
4488
4489 setTimeout(function () {
4490 _this7.setupInitialPlaylist(_this7.src);
4491 }, 0);
4492 return;
4493 } // request the specified URL
4494
4495
4496 this.request = this.vhs_.xhr({
4497 uri: this.src,
4498 withCredentials: this.withCredentials
4499 }, function (error, req) {
4500 // disposed
4501 if (!_this7.request) {
4502 return;
4503 } // clear the loader's request reference
4504
4505
4506 _this7.request = null;
4507
4508 if (error) {
4509 _this7.error = {
4510 status: req.status,
4511 message: "HLS playlist request error at URL: " + _this7.src + ".",
4512 responseText: req.responseText,
4513 // MEDIA_ERR_NETWORK
4514 code: 2
4515 };
4516
4517 if (_this7.state === 'HAVE_NOTHING') {
4518 _this7.started = false;
4519 }
4520
4521 return _this7.trigger('error');
4522 }
4523
4524 _this7.src = resolveManifestRedirect(_this7.handleManifestRedirects, _this7.src, req);
4525
4526 var manifest = _this7.parseManifest_({
4527 manifestString: req.responseText,
4528 url: _this7.src
4529 });
4530
4531 _this7.setupInitialPlaylist(manifest);
4532 });
4533 };
4534
4535 _proto.srcUri = function srcUri() {
4536 return typeof this.src === 'string' ? this.src : this.src.uri;
4537 }
4538 /**
4539 * Given a manifest object that's either a master or media playlist, trigger the proper
4540 * events and set the state of the playlist loader.
4541 *
4542 * If the manifest object represents a master playlist, `loadedplaylist` will be
4543 * triggered to allow listeners to select a playlist. If none is selected, the loader
4544 * will default to the first one in the playlists array.
4545 *
4546 * If the manifest object represents a media playlist, `loadedplaylist` will be
4547 * triggered followed by `loadedmetadata`, as the only available playlist is loaded.
4548 *
4549 * In the case of a media playlist, a master playlist object wrapper with one playlist
4550 * will be created so that all logic can handle playlists in the same fashion (as an
4551 * assumed manifest object schema).
4552 *
4553 * @param {Object} manifest
4554 * The parsed manifest object
4555 */
4556 ;
4557
4558 _proto.setupInitialPlaylist = function setupInitialPlaylist(manifest) {
4559 this.state = 'HAVE_MASTER';
4560
4561 if (manifest.playlists) {
4562 this.master = manifest;
4563 addPropertiesToMaster(this.master, this.srcUri()); // If the initial master playlist has playlists with segments already resolved,
4564 // then resolve segment URIs in advance, since that normally happens after a
4565 // playlist request, which won't occur for playlists that are already resolved.
4566
4567 manifest.playlists.forEach(function (playlist) {
4568 playlist.segments = getAllSegments(playlist);
4569 playlist.segments.forEach(function (segment) {
4570 resolveSegmentUris(segment, playlist.resolvedUri);
4571 });
4572 });
4573 this.trigger('loadedplaylist');
4574
4575 if (!this.request) {
4576 // no media playlist was specifically selected so start
4577 // from the first listed one
4578 this.media(this.master.playlists[0]);
4579 }
4580
4581 return;
4582 } // In order to support media playlists passed in as vhs-json, the case where the uri
4583 // is not provided as part of the manifest should be considered, and an appropriate
4584 // default used.
4585
4586
4587 var uri = this.srcUri() || window.location.href;
4588 this.master = masterForMedia(manifest, uri);
4589 this.haveMetadata({
4590 playlistObject: manifest,
4591 url: uri,
4592 id: this.master.playlists[0].id
4593 });
4594 this.trigger('loadedmetadata');
4595 };
4596
4597 return PlaylistLoader;
4598 }(EventTarget$1);
4599
4600 /**
4601 * @file xhr.js
4602 */
4603 var videojsXHR = videojs__default["default"].xhr,
4604 mergeOptions$1 = videojs__default["default"].mergeOptions;
4605
4606 var callbackWrapper = function callbackWrapper(request, error, response, callback) {
4607 var reqResponse = request.responseType === 'arraybuffer' ? request.response : request.responseText;
4608
4609 if (!error && reqResponse) {
4610 request.responseTime = Date.now();
4611 request.roundTripTime = request.responseTime - request.requestTime;
4612 request.bytesReceived = reqResponse.byteLength || reqResponse.length;
4613
4614 if (!request.bandwidth) {
4615 request.bandwidth = Math.floor(request.bytesReceived / request.roundTripTime * 8 * 1000);
4616 }
4617 }
4618
4619 if (response.headers) {
4620 request.responseHeaders = response.headers;
4621 } // videojs.xhr now uses a specific code on the error
4622 // object to signal that a request has timed out instead
4623 // of setting a boolean on the request object
4624
4625
4626 if (error && error.code === 'ETIMEDOUT') {
4627 request.timedout = true;
4628 } // videojs.xhr no longer considers status codes outside of 200 and 0
4629 // (for file uris) to be errors, but the old XHR did, so emulate that
4630 // behavior. Status 206 may be used in response to byterange requests.
4631
4632
4633 if (!error && !request.aborted && response.statusCode !== 200 && response.statusCode !== 206 && response.statusCode !== 0) {
4634 error = new Error('XHR Failed with a response of: ' + (request && (reqResponse || request.responseText)));
4635 }
4636
4637 callback(error, request);
4638 };
4639
4640 var xhrFactory = function xhrFactory() {
4641 var xhr = function XhrFunction(options, callback) {
4642 // Add a default timeout
4643 options = mergeOptions$1({
4644 timeout: 45e3
4645 }, options); // Allow an optional user-specified function to modify the option
4646 // object before we construct the xhr request
4647
4648 var beforeRequest = XhrFunction.beforeRequest || videojs__default["default"].Vhs.xhr.beforeRequest;
4649
4650 if (beforeRequest && typeof beforeRequest === 'function') {
4651 var newOptions = beforeRequest(options);
4652
4653 if (newOptions) {
4654 options = newOptions;
4655 }
4656 } // Use the standard videojs.xhr() method unless `videojs.Vhs.xhr` has been overridden
4657 // TODO: switch back to videojs.Vhs.xhr.name === 'XhrFunction' when we drop IE11
4658
4659
4660 var xhrMethod = videojs__default["default"].Vhs.xhr.original === true ? videojsXHR : videojs__default["default"].Vhs.xhr;
4661 var request = xhrMethod(options, function (error, response) {
4662 return callbackWrapper(request, error, response, callback);
4663 });
4664 var originalAbort = request.abort;
4665
4666 request.abort = function () {
4667 request.aborted = true;
4668 return originalAbort.apply(request, arguments);
4669 };
4670
4671 request.uri = options.uri;
4672 request.requestTime = Date.now();
4673 return request;
4674 };
4675
4676 xhr.original = true;
4677 return xhr;
4678 };
4679 /**
4680 * Turns segment byterange into a string suitable for use in
4681 * HTTP Range requests
4682 *
4683 * @param {Object} byterange - an object with two values defining the start and end
4684 * of a byte-range
4685 */
4686
4687
4688 var byterangeStr = function byterangeStr(byterange) {
4689 // `byterangeEnd` is one less than `offset + length` because the HTTP range
4690 // header uses inclusive ranges
4691 var byterangeEnd;
4692 var byterangeStart = byterange.offset;
4693
4694 if (typeof byterange.offset === 'bigint' || typeof byterange.length === 'bigint') {
4695 byterangeEnd = window.BigInt(byterange.offset) + window.BigInt(byterange.length) - window.BigInt(1);
4696 } else {
4697 byterangeEnd = byterange.offset + byterange.length - 1;
4698 }
4699
4700 return 'bytes=' + byterangeStart + '-' + byterangeEnd;
4701 };
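  // Example (illustrative): HTTP Range headers are inclusive, so a 1024-byte
  // range starting at offset 0 ends at byte 1023:
  //
  //   byterangeStr({ offset: 0, length: 1024 }); // => 'bytes=0-1023'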
4702 /**
4703 * Defines headers for use in the xhr request for a particular segment.
4704 *
4705 * @param {Object} segment - a simplified copy of the segmentInfo object
4706 * from SegmentLoader
4707 */
4708
4709 var segmentXhrHeaders = function segmentXhrHeaders(segment) {
4710 var headers = {};
4711
4712 if (segment.byterange) {
4713 headers.Range = byterangeStr(segment.byterange);
4714 }
4715
4716 return headers;
4717 };
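  // Example (illustrative): a segment with a byterange gets a Range header,
  // otherwise the headers object stays empty:
  //
  //   segmentXhrHeaders({ byterange: { offset: 512, length: 256 } });
  //   // => { Range: 'bytes=512-767' }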
4718
4719 var MPEGURL_REGEX = /^(audio|video|application)\/(x-|vnd\.apple\.)?mpegurl/i;
4720 var DASH_REGEX = /^application\/dash\+xml/i;
4721 /**
4722 * Returns a string that describes the type of source based on a video source object's
4723 * media type.
4724 *
4725 * @see {@link https://dev.w3.org/html5/pf-summary/video.html#dom-source-type|Source Type}
4726 *
4727 * @param {string} type
4728 * Video source object media type
4729 * @return {('hls'|'dash'|'vhs-json'|null)}
4730 * VHS source type string
4731 */
4732
4733 var simpleTypeFromSourceType = function simpleTypeFromSourceType(type) {
4734 if (MPEGURL_REGEX.test(type)) {
4735 return 'hls';
4736 }
4737
4738 if (DASH_REGEX.test(type)) {
4739 return 'dash';
4740 } // Denotes the special case of a manifest object passed to http-streaming instead of a
4741 // source URL.
4742 //
4743 // See https://en.wikipedia.org/wiki/Media_type for details on specifying media types.
4744 //
4745 // In this case, vnd stands for vendor, video.js for the organization, VHS for this
4746 // project, and the +json suffix identifies the structure of the media type.
4747
4748
4749 if (type === 'application/vnd.videojs.vhs+json') {
4750 return 'vhs-json';
4751 }
4752
4753 return null;
4754 };
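  // Examples (derived from the regexes above):
  //
  //   simpleTypeFromSourceType('application/x-mpegURL');            // => 'hls'
  //   simpleTypeFromSourceType('application/dash+xml');             // => 'dash'
  //   simpleTypeFromSourceType('application/vnd.videojs.vhs+json'); // => 'vhs-json'
  //   simpleTypeFromSourceType('video/mp4');                        // => null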
4755
4756 // const log2 = Math.log2 ? Math.log2 : (x) => (Math.log(x) / Math.log(2));
4757 // we used to do this with log2 but BigInt does not support builtin math
4758 // Math.ceil(log2(x));
4759
4760
4761 var countBits = function countBits(x) {
4762 return x.toString(2).length;
4763 }; // count the number of whole bytes it would take to represent a number
4764
4765 var countBytes = function countBytes(x) {
4766 return Math.ceil(countBits(x) / 8);
4767 };
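  // Examples: countBits uses the binary string form, so it works for both
  // Number and BigInt values:
  //
  //   countBits(0xFF);   // => 8
  //   countBytes(0xFF);  // => 1
  //   countBytes(0x100); // => 2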
4768 var isArrayBufferView = function isArrayBufferView(obj) {
4769 if (typeof ArrayBuffer.isView === 'function') {
4770 return ArrayBuffer.isView(obj);
4771 }
4772
4773 return obj && obj.buffer instanceof ArrayBuffer;
4774 };
4775 var isTypedArray = function isTypedArray(obj) {
4776 return isArrayBufferView(obj);
4777 };
4778 var toUint8 = function toUint8(bytes) {
4779 if (bytes instanceof Uint8Array) {
4780 return bytes;
4781 }
4782
4783 if (!Array.isArray(bytes) && !isTypedArray(bytes) && !(bytes instanceof ArrayBuffer)) {
4784 // any non-number or NaN leads to empty uint8array
4785 // eslint-disable-next-line
4786 if (typeof bytes !== 'number' || typeof bytes === 'number' && bytes !== bytes) {
4787 bytes = 0;
4788 } else {
4789 bytes = [bytes];
4790 }
4791 }
4792
4793 return new Uint8Array(bytes && bytes.buffer || bytes, bytes && bytes.byteOffset || 0, bytes && bytes.byteLength || 0);
4794 };
4795 var BigInt = window.BigInt || Number;
4796 var BYTE_TABLE = [BigInt('0x1'), BigInt('0x100'), BigInt('0x10000'), BigInt('0x1000000'), BigInt('0x100000000'), BigInt('0x10000000000'), BigInt('0x1000000000000'), BigInt('0x100000000000000'), BigInt('0x10000000000000000')];
4797 (function () {
4798 var a = new Uint16Array([0xFFCC]);
4799 var b = new Uint8Array(a.buffer, a.byteOffset, a.byteLength);
4800
4801 if (b[0] === 0xFF) {
4802 return 'big';
4803 }
4804
4805 if (b[0] === 0xCC) {
4806 return 'little';
4807 }
4808
4809 return 'unknown';
4810 })();
4811 var bytesToNumber = function bytesToNumber(bytes, _temp) {
4812 var _ref = _temp === void 0 ? {} : _temp,
4813 _ref$signed = _ref.signed,
4814 signed = _ref$signed === void 0 ? false : _ref$signed,
4815 _ref$le = _ref.le,
4816 le = _ref$le === void 0 ? false : _ref$le;
4817
4818 bytes = toUint8(bytes);
4819 var fn = le ? 'reduce' : 'reduceRight';
4820 var obj = bytes[fn] ? bytes[fn] : Array.prototype[fn];
4821 var number = obj.call(bytes, function (total, byte, i) {
4822 var exponent = le ? i : Math.abs(i + 1 - bytes.length);
4823 return total + BigInt(byte) * BYTE_TABLE[exponent];
4824 }, BigInt(0));
4825
4826 if (signed) {
4827 var max = BYTE_TABLE[bytes.length] / BigInt(2) - BigInt(1);
4828 number = BigInt(number);
4829
4830 if (number > max) {
4831 number -= max;
4832 number -= max;
4833 number -= BigInt(2);
4834 }
4835 }
4836
4837 return Number(number);
4838 };
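  // Examples (big-endian by default, little-endian with le: true):
  //
  //   bytesToNumber([0x01, 0x00]);               // => 256
  //   bytesToNumber([0x01, 0x00], { le: true }); // => 1
  //   bytesToNumber([0xFF], { signed: true });   // => -1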
4839 var numberToBytes = function numberToBytes(number, _temp2) {
4840 var _ref2 = _temp2 === void 0 ? {} : _temp2,
4841 _ref2$le = _ref2.le,
4842 le = _ref2$le === void 0 ? false : _ref2$le; // eslint-disable-next-line
4843
4844
4845 if (typeof number !== 'bigint' && typeof number !== 'number' || typeof number === 'number' && number !== number) {
4846 number = 0;
4847 }
4848
4849 number = BigInt(number);
4850 var byteCount = countBytes(number);
4851 var bytes = new Uint8Array(new ArrayBuffer(byteCount));
4852
4853 for (var i = 0; i < byteCount; i++) {
4854 var byteIndex = le ? i : Math.abs(i + 1 - bytes.length);
4855 bytes[byteIndex] = Number(number / BYTE_TABLE[i] & BigInt(0xFF));
4856
4857 if (number < 0) {
4858 bytes[byteIndex] = Math.abs(~bytes[byteIndex]);
4859 bytes[byteIndex] -= i === 0 ? 1 : 2;
4860 }
4861 }
4862
4863 return bytes;
4864 };
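  // Example: the inverse of bytesToNumber, big-endian by default:
  //
  //   numberToBytes(256);               // => Uint8Array [ 0x01, 0x00 ]
  //   numberToBytes(256, { le: true }); // => Uint8Array [ 0x00, 0x01 ]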
4865 var stringToBytes = function stringToBytes(string, stringIsBytes) {
4866 if (typeof string !== 'string' && string && typeof string.toString === 'function') {
4867 string = string.toString();
4868 }
4869
4870 if (typeof string !== 'string') {
4871 return new Uint8Array();
4872 } // If the string already is bytes, we don't have to do this
4873 // otherwise we do this so that we split multi length characters
4874 // into individual bytes
4875
4876
4877 if (!stringIsBytes) {
4878 string = unescape(encodeURIComponent(string));
4879 }
4880
4881 var view = new Uint8Array(string.length);
4882
4883 for (var i = 0; i < string.length; i++) {
4884 view[i] = string.charCodeAt(i);
4885 }
4886
4887 return view;
4888 };
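  // Examples: multi-byte characters are split into their UTF-8 bytes unless
  // stringIsBytes indicates the string already holds raw byte data:
  //
  //   stringToBytes('ftyp'); // => Uint8Array [ 0x66, 0x74, 0x79, 0x70 ]
  //   stringToBytes('é');    // => Uint8Array [ 0xC3, 0xA9 ]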
4889 var concatTypedArrays = function concatTypedArrays() {
4890 for (var _len = arguments.length, buffers = new Array(_len), _key = 0; _key < _len; _key++) {
4891 buffers[_key] = arguments[_key];
4892 }
4893
4894 buffers = buffers.filter(function (b) {
4895 return b && (b.byteLength || b.length) && typeof b !== 'string';
4896 });
4897
4898 if (buffers.length <= 1) {
4899 // for 0 length we will return empty uint8
4900 // for 1 length we return the first uint8
4901 return toUint8(buffers[0]);
4902 }
4903
4904 var totalLen = buffers.reduce(function (total, buf, i) {
4905 return total + (buf.byteLength || buf.length);
4906 }, 0);
4907 var tempBuffer = new Uint8Array(totalLen);
4908 var offset = 0;
4909 buffers.forEach(function (buf) {
4910 buf = toUint8(buf);
4911 tempBuffer.set(buf, offset);
4912 offset += buf.byteLength;
4913 });
4914 return tempBuffer;
4915 };
4916 /**
4917 * Check if the bytes "b" are contained within bytes "a".
4918 *
4919 * @param {Uint8Array|Array} a
4920 * Bytes to check in
4921 *
4922 * @param {Uint8Array|Array} b
4923 * Bytes to check for
4924 *
4925 * @param {Object} options
4926 * options
4927 *
4928 * @param {number} [options.offset=0]
4929 * offset to use when looking at bytes in a
4930 *
4931 * @param {Array|Uint8Array} [options.mask=[]]
4932 * mask to use on bytes before comparison.
4933 *
4934 * @return {boolean}
4935 * If all bytes in b are inside of a, taking into account
4936 * bit masks.
4937 */
4938
4939 var bytesMatch = function bytesMatch(a, b, _temp3) {
4940 var _ref3 = _temp3 === void 0 ? {} : _temp3,
4941 _ref3$offset = _ref3.offset,
4942 offset = _ref3$offset === void 0 ? 0 : _ref3$offset,
4943 _ref3$mask = _ref3.mask,
4944 mask = _ref3$mask === void 0 ? [] : _ref3$mask;
4945
4946 a = toUint8(a);
4947 b = toUint8(b); // ie 11 does not support uint8 every
4948
4949 var fn = b.every ? b.every : Array.prototype.every;
4950 return b.length && a.length - offset >= b.length && // ie 11 doesn't support every on uint8
4951 fn.call(b, function (bByte, i) {
4952 var aByte = mask[i] ? mask[i] & a[offset + i] : a[offset + i];
4953 return bByte === aByte;
4954 });
4955 };
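  // Example (assumed bytes): checking for the 'ftyp' marker at offset 4 of
  // an MP4 file signature:
  //
  //   bytesMatch(
  //     [0x00, 0x00, 0x00, 0x18, 0x66, 0x74, 0x79, 0x70], // '....ftyp'
  //     [0x66, 0x74, 0x79, 0x70],                         // 'ftyp'
  //     { offset: 4 }
  //   ); // => true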
4956
4957 /**
4958 * @file bin-utils.js
4959 */
4960
4961 /**
4962 * convert a TimeRange to text
4963 *
4964 * @param {TimeRange} range the timerange to use for conversion
4965 * @param {number} i the iterator on the range to convert
4966 * @return {string} the range in string format
4967 */
4968
4969 var textRange = function textRange(range, i) {
4970 return range.start(i) + '-' + range.end(i);
4971 };
4972 /**
4973 * format a number as hex string
4974 *
4975 * @param {number} e The number
4976 * @param {number} i the iterator
4977 * @return {string} the hex formatted number as a string
4978 */
4979
4980
4981 var formatHexString = function formatHexString(e, i) {
4982 var value = e.toString(16);
4983 return '00'.substring(0, 2 - value.length) + value + (i % 2 ? ' ' : '');
4984 };
4985
4986 var formatAsciiString = function formatAsciiString(e) {
4987 if (e >= 0x20 && e < 0x7e) {
4988 return String.fromCharCode(e);
4989 }
4990
4991 return '.';
4992 };
4993 /**
4994 * Creates an object for sending to a web worker modifying properties that are TypedArrays
4995 * into a new object with separated properties for the buffer, byteOffset, and byteLength.
4996 *
4997 * @param {Object} message
4998 * Object of properties and values to send to the web worker
4999 * @return {Object}
5000 * Modified message with TypedArray values expanded
5001 * @function createTransferableMessage
5002 */
5003
5004
5005 var createTransferableMessage = function createTransferableMessage(message) {
5006 var transferable = {};
5007 Object.keys(message).forEach(function (key) {
5008 var value = message[key];
5009
5010 if (isArrayBufferView(value)) {
5011 transferable[key] = {
5012 bytes: value.buffer,
5013 byteOffset: value.byteOffset,
5014 byteLength: value.byteLength
5015 };
5016 } else {
5017 transferable[key] = value;
5018 }
5019 });
5020 return transferable;
5021 };
5022 /**
5023 * Returns a unique string identifier for a media initialization
5024 * segment.
5025 *
5026 * @param {Object} initSegment
5027 * the init segment object.
5028 *
5029 * @return {string} the generated init segment id
5030 */
5031
5032 var initSegmentId = function initSegmentId(initSegment) {
5033 var byterange = initSegment.byterange || {
5034 length: Infinity,
5035 offset: 0
5036 };
5037 return [byterange.length, byterange.offset, initSegment.resolvedUri].join(',');
5038 };
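  // Example (assumed URI): with no byterange, the defaults produce a stable
  // id from the resolved URI alone:
  //
  //   initSegmentId({ resolvedUri: 'https://example.com/init.mp4' });
  //   // => 'Infinity,0,https://example.com/init.mp4'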
5039 /**
5040 * Returns a unique string identifier for a media segment key.
5041 *
5042 * @param {Object} key the encryption key
5043 * @return {string} the unique id for the media segment key.
5044 */
5045
5046 var segmentKeyId = function segmentKeyId(key) {
5047 return key.resolvedUri;
5048 };
5049 /**
5050 * utils to help dump binary data to the console
5051 *
5052 * @param {Array|TypedArray} data
5053 * data to dump to a string
5054 *
5055 * @return {string} the data as a hex string.
5056 */
5057
5058 var hexDump = function hexDump(data) {
5059 var bytes = Array.prototype.slice.call(data);
5060 var step = 16;
5061 var result = '';
5062 var hex;
5063 var ascii;
5064
5065 for (var j = 0; j < bytes.length / step; j++) {
5066 hex = bytes.slice(j * step, j * step + step).map(formatHexString).join('');
5067 ascii = bytes.slice(j * step, j * step + step).map(formatAsciiString).join('');
5068 result += hex + ' ' + ascii + '\n';
5069 }
5070
5071 return result;
5072 };
5073 var tagDump = function tagDump(_ref) {
5074 var bytes = _ref.bytes;
5075 return hexDump(bytes);
5076 };
5077 var textRanges = function textRanges(ranges) {
5078 var result = '';
5079 var i;
5080
5081 for (i = 0; i < ranges.length; i++) {
5082 result += textRange(ranges, i) + ' ';
5083 }
5084
5085 return result;
5086 };
5087
5088 var utils = /*#__PURE__*/Object.freeze({
5089 __proto__: null,
5090 createTransferableMessage: createTransferableMessage,
5091 initSegmentId: initSegmentId,
5092 segmentKeyId: segmentKeyId,
5093 hexDump: hexDump,
5094 tagDump: tagDump,
5095 textRanges: textRanges
5096 });
5097
5098 // TODO handle fmp4 case where the timing info is accurate and doesn't involve transmux
5099 // 25% was arbitrarily chosen, and may need to be refined over time.
5100
5101 var SEGMENT_END_FUDGE_PERCENT = 0.25;
5102 /**
5103 * Converts a player time (any time that can be gotten/set from player.currentTime(),
5104 * e.g., any time within player.seekable().start(0) to player.seekable().end(0)) to a
5105 * program time (any time referencing the real world (e.g., EXT-X-PROGRAM-DATE-TIME)).
5106 *
5107 * The containing segment is required as the EXT-X-PROGRAM-DATE-TIME serves as an "anchor
5108 * point" (a point where we have a mapping from program time to player time, with player
5109 * time being the post transmux start of the segment).
5110 *
5111 * For more details, see [this doc](../../docs/program-time-from-player-time.md).
5112 *
5113 * @param {number} playerTime the player time
5114 * @param {Object} segment the segment which contains the player time
5115 * @return {Date} program time
5116 */
5117
5118 var playerTimeToProgramTime = function playerTimeToProgramTime(playerTime, segment) {
5119 if (!segment.dateTimeObject) {
5120 // Can't convert without an "anchor point" for the program time (i.e., a time that can
5121 // be used to map the start of a segment with a real world time).
5122 return null;
5123 }
5124
5125 var transmuxerPrependedSeconds = segment.videoTimingInfo.transmuxerPrependedSeconds;
5126 var transmuxedStart = segment.videoTimingInfo.transmuxedPresentationStart; // get the start of the content from before old content is prepended
5127
5128 var startOfSegment = transmuxedStart + transmuxerPrependedSeconds;
5129 var offsetFromSegmentStart = playerTime - startOfSegment;
5130 return new Date(segment.dateTimeObject.getTime() + offsetFromSegmentStart * 1000);
5131 };
5132 var originalSegmentVideoDuration = function originalSegmentVideoDuration(videoTimingInfo) {
5133 return videoTimingInfo.transmuxedPresentationEnd - videoTimingInfo.transmuxedPresentationStart - videoTimingInfo.transmuxerPrependedSeconds;
5134 };
5135 /**
5136 * Finds a segment that contains the time requested given as an ISO-8601 string. The
5137 * returned segment might be an estimate or an accurate match.
5138 *
5139 * @param {string} programTime The ISO-8601 programTime to find a match for
5140 * @param {Object} playlist A playlist object to search within
5141 */
5142
5143 var findSegmentForProgramTime = function findSegmentForProgramTime(programTime, playlist) {
5144 // Assumptions:
5145 // - verifyProgramDateTimeTags has already been run
5146 // - live streams have been started
5147 var dateTimeObject;
5148
5149 try {
5150 dateTimeObject = new Date(programTime);
5151 } catch (e) {
5152 return null;
5153 }
5154
5155 if (!playlist || !playlist.segments || playlist.segments.length === 0) {
5156 return null;
5157 }
5158
5159 var segment = playlist.segments[0];
5160
5161 if (dateTimeObject < segment.dateTimeObject) {
5162 // Requested time is before stream start.
5163 return null;
5164 }
5165
5166 for (var i = 0; i < playlist.segments.length - 1; i++) {
5167 segment = playlist.segments[i];
5168 var nextSegmentStart = playlist.segments[i + 1].dateTimeObject;
5169
5170 if (dateTimeObject < nextSegmentStart) {
5171 break;
5172 }
5173 }
5174
5175 var lastSegment = playlist.segments[playlist.segments.length - 1];
5176 var lastSegmentStart = lastSegment.dateTimeObject;
5177 var lastSegmentDuration = lastSegment.videoTimingInfo ? originalSegmentVideoDuration(lastSegment.videoTimingInfo) : lastSegment.duration + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT;
5178 var lastSegmentEnd = new Date(lastSegmentStart.getTime() + lastSegmentDuration * 1000);
5179
5180 if (dateTimeObject > lastSegmentEnd) {
5181 // Beyond the end of the stream, or our best guess of the end of the stream.
5182 return null;
5183 }
5184
5185 if (dateTimeObject > lastSegmentStart) {
5186 segment = lastSegment;
5187 }
5188
5189 return {
5190 segment: segment,
5191 estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : Playlist.duration(playlist, playlist.mediaSequence + playlist.segments.indexOf(segment)),
5192 // Although, given that all segments have accurate date time objects, the segment
5193 // selected should be accurate, unless the video has been transmuxed at some point
5194 // (determined by the presence of the videoTimingInfo object), the segment's "player
5195 // time" (the start time in the player) can't be considered accurate.
5196 type: segment.videoTimingInfo ? 'accurate' : 'estimate'
5197 };
5198 };
5199 /**
5200 * Finds a segment that contains the given player time(in seconds).
5201 *
5202 * @param {number} time The player time to find a match for
5203 * @param {Object} playlist A playlist object to search within
5204 */
5205
5206 var findSegmentForPlayerTime = function findSegmentForPlayerTime(time, playlist) {
5207 // Assumptions:
5208 // - there will always be a segment.duration
5209 // - we can start from zero
5210 // - segments are in time order
5211 if (!playlist || !playlist.segments || playlist.segments.length === 0) {
5212 return null;
5213 }
5214
5215 var segmentEnd = 0;
5216 var segment;
5217
5218 for (var i = 0; i < playlist.segments.length; i++) {
5219 segment = playlist.segments[i]; // videoTimingInfo is set after the segment is downloaded and transmuxed, and
5220 // should contain the most accurate values we have for the segment's player times.
5221 //
5222 // Use the accurate transmuxedPresentationEnd value if it is available, otherwise fall
5223 // back to an estimate based on the manifest derived (inaccurate) segment.duration, to
5224 // calculate an end value.
5225
5226 segmentEnd = segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationEnd : segmentEnd + segment.duration;
5227
5228 if (time <= segmentEnd) {
5229 break;
5230 }
5231 }
5232
5233 var lastSegment = playlist.segments[playlist.segments.length - 1];
5234
5235 if (lastSegment.videoTimingInfo && lastSegment.videoTimingInfo.transmuxedPresentationEnd < time) {
5236 // The time requested is beyond the stream end.
5237 return null;
5238 }
5239
5240 if (time > segmentEnd) {
5241 // The time is within or beyond the last segment.
5242 //
5243 // Check to see if the time is beyond a reasonable guess of the end of the stream.
5244 if (time > segmentEnd + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT) {
5245 // Technically, because the duration value is only an estimate, the time may still
5246 // exist in the last segment, however, there isn't enough information to make even
5247 // a reasonable estimate.
5248 return null;
5249 }
5250
5251 segment = lastSegment;
5252 }
5253
5254 return {
5255 segment: segment,
5256 estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : segmentEnd - segment.duration,
5257 // Because videoTimingInfo is only set after transmux, it is the only way to get
5258 // accurate timing values.
5259 type: segment.videoTimingInfo ? 'accurate' : 'estimate'
5260 };
5261 };
5262 /**
5263 * Gives the offset of the comparisonTimestamp from the programTime timestamp in seconds.
5264 * If the offset returned is positive, the programTime occurs after the
5265 * comparisonTimestamp.
5266 * If the offset is negative, the programTime occurs before the comparisonTimestamp.
5267 *
5268 * @param {string} comparisonTimeStamp An ISO-8601 timestamp to compare against
5269 * @param {string} programTime The programTime as an ISO-8601 string
5270 * @return {number} offset
5271 */
5272
5273 var getOffsetFromTimestamp = function getOffsetFromTimestamp(comparisonTimeStamp, programTime) {
5274 var segmentDateTime;
5275 var programDateTime;
5276
5277 try {
5278 segmentDateTime = new Date(comparisonTimeStamp);
5279 programDateTime = new Date(programTime);
5280 } catch (e) {// TODO handle error
5281 }
5282
5283 var segmentTimeEpoch = segmentDateTime.getTime();
5284 var programTimeEpoch = programDateTime.getTime();
5285 return (programTimeEpoch - segmentTimeEpoch) / 1000;
5286 };
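  // Example: a program time ten seconds after the segment's
  // EXT-X-PROGRAM-DATE-TIME yields a positive offset:
  //
  //   getOffsetFromTimestamp('2022-01-01T00:00:00Z', '2022-01-01T00:00:10Z');
  //   // => 10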
5287 /**
5288 * Checks that all segments in this playlist have programDateTime tags.
5289 *
5290 * @param {Object} playlist A playlist object
5291 */
5292
5293 var verifyProgramDateTimeTags = function verifyProgramDateTimeTags(playlist) {
5294 if (!playlist.segments || playlist.segments.length === 0) {
5295 return false;
5296 }
5297
5298 for (var i = 0; i < playlist.segments.length; i++) {
5299 var segment = playlist.segments[i];
5300
5301 if (!segment.dateTimeObject) {
5302 return false;
5303 }
5304 }
5305
5306 return true;
5307 };
5308 /**
5309 * Returns the programTime of the media given a playlist and a playerTime.
5310 * The playlist must have programDateTime tags for a programDateTime tag to be returned.
5311 * If the segments containing the time requested have not been buffered yet, an estimate
5312 * may be returned to the callback.
5313 *
5314 * @param {Object} args
5315 * @param {Object} args.playlist A playlist object to search within
5316 * @param {number} time A playerTime in seconds
5317 * @param {Function} callback(err, programTime)
5318 * @return {string} err.message A detailed error message
5319 * @return {Object} programTime
5320 * @return {number} programTime.mediaSeconds The playerTime in seconds
5321 * @return {string} programTime.programDateTime The programTime as an ISO-8601 String
5322 */
5323
5324 var getProgramTime = function getProgramTime(_ref) {
5325 var playlist = _ref.playlist,
5326 _ref$time = _ref.time,
5327 time = _ref$time === void 0 ? undefined : _ref$time,
5328 callback = _ref.callback;
5329
5330 if (!callback) {
5331 throw new Error('getProgramTime: callback must be provided');
5332 }
5333
5334 if (!playlist || time === undefined) {
5335 return callback({
5336 message: 'getProgramTime: playlist and time must be provided'
5337 });
5338 }
5339
5340 var matchedSegment = findSegmentForPlayerTime(time, playlist);
5341
5342 if (!matchedSegment) {
5343 return callback({
5344 message: 'valid programTime was not found'
5345 });
5346 }
5347
5348 if (matchedSegment.type === 'estimate') {
5349 return callback({
5350 message: 'Accurate programTime could not be determined.' + ' Please seek to e.seekTime and try again',
5351 seekTime: matchedSegment.estimatedStart
5352 });
5353 }
5354
5355 var programTimeObject = {
5356 mediaSeconds: time
5357 };
5358 var programTime = playerTimeToProgramTime(time, matchedSegment.segment);
5359
5360 if (programTime) {
5361 programTimeObject.programDateTime = programTime.toISOString();
5362 }
5363
5364 return callback(null, programTimeObject);
5365 };
5366 /**
5367 * Seeks in the player to a time that matches the given programTime ISO-8601 string.
5368 *
5369 * @param {Object} args
5370 * @param {string} args.programTime A programTime to seek to as an ISO-8601 String
5371 * @param {Object} args.playlist A playlist to look within
5372 * @param {number} args.retryCount The number of times to try for an accurate seek. Default is 2.
5373 * @param {Function} args.seekTo A method to perform a seek
5374 * @param {boolean} args.pauseAfterSeek Whether to end in a paused state after seeking. Default is true.
5375 * @param {Object} args.tech The tech to seek on
5376 * @param {Function} args.callback(err, newTime) A callback to return the new time to
5377 * @return {string} err.message A detailed error message
5378 * @return {number} newTime The exact time that was seeked to in seconds
5379 */
5380
5381 var seekToProgramTime = function seekToProgramTime(_ref2) {
5382 var programTime = _ref2.programTime,
5383 playlist = _ref2.playlist,
5384 _ref2$retryCount = _ref2.retryCount,
5385 retryCount = _ref2$retryCount === void 0 ? 2 : _ref2$retryCount,
5386 seekTo = _ref2.seekTo,
5387 _ref2$pauseAfterSeek = _ref2.pauseAfterSeek,
5388 pauseAfterSeek = _ref2$pauseAfterSeek === void 0 ? true : _ref2$pauseAfterSeek,
5389 tech = _ref2.tech,
5390 callback = _ref2.callback;
5391
5392 if (!callback) {
5393 throw new Error('seekToProgramTime: callback must be provided');
5394 }
5395
5396 if (typeof programTime === 'undefined' || !playlist || !seekTo) {
5397 return callback({
5398 message: 'seekToProgramTime: programTime, seekTo and playlist must be provided'
5399 });
5400 }
5401
5402 if (!playlist.endList && !tech.hasStarted_) {
5403 return callback({
5404 message: 'player must be playing a live stream to start buffering'
5405 });
5406 }
5407
5408 if (!verifyProgramDateTimeTags(playlist)) {
5409 return callback({
5410 message: 'programDateTime tags must be provided in the manifest ' + playlist.resolvedUri
5411 });
5412 }
5413
5414 var matchedSegment = findSegmentForProgramTime(programTime, playlist); // no match
5415
5416 if (!matchedSegment) {
5417 return callback({
5418 message: programTime + " was not found in the stream"
5419 });
5420 }
5421
5422 var segment = matchedSegment.segment;
5423 var mediaOffset = getOffsetFromTimestamp(segment.dateTimeObject, programTime);
5424
5425 if (matchedSegment.type === 'estimate') {
5426 // we've run out of retries
5427 if (retryCount === 0) {
5428 return callback({
5429 message: programTime + " is not buffered yet. Try again"
5430 });
5431 }
5432
5433 seekTo(matchedSegment.estimatedStart + mediaOffset);
5434 tech.one('seeked', function () {
5435 seekToProgramTime({
5436 programTime: programTime,
5437 playlist: playlist,
5438 retryCount: retryCount - 1,
5439 seekTo: seekTo,
5440 pauseAfterSeek: pauseAfterSeek,
5441 tech: tech,
5442 callback: callback
5443 });
5444 });
5445 return;
5446 } // Since the segment.start value is determined from the buffered end or ending time
5447 // of the prior segment, the seekToTime doesn't need to account for any transmuxer
5448 // modifications.
5449
5450
5451 var seekToTime = segment.start + mediaOffset;
5452
5453 var seekedCallback = function seekedCallback() {
5454 return callback(null, tech.currentTime());
5455 }; // listen for seeked event
5456
5457
5458 tech.one('seeked', seekedCallback); // pause before seeking as video.js will restore this state
5459
5460 if (pauseAfterSeek) {
5461 tech.pause();
5462 }
5463
5464 seekTo(seekToTime);
5465 };
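  // Illustrative usage sketch, not part of the library: `playlist` and
  // `player` are hypothetical stand-ins. Retries on estimated positions are
  // handled internally via the 'seeked' event, up to retryCount times.
  //
  //   seekToProgramTime({
  //     programTime: '2022-01-01T00:00:30.000Z',
  //     playlist: playlist,
  //     seekTo: function (time) { player.currentTime(time); },
  //     tech: player.tech_,
  //     callback: function (err, newTime) {
  //       if (err) {
  //         return console.warn(err.message);
  //       }
  //       console.log('seeked to', newTime);
  //     }
  //   });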
5466
5467 /**
5468 * Loops through all supported media groups in master and calls the provided
5469 * callback for each group
5470 *
5471 * @param {Object} master
5472 * The parsed master manifest object
5473 * @param {string[]} groups
5474 * The media groups to call the callback for
5475 * @param {Function} callback
5476 * Callback to call for each media group
5477 */
5478 var forEachMediaGroup = function forEachMediaGroup(master, groups, callback) {
5479 groups.forEach(function (mediaType) {
5480 for (var groupKey in master.mediaGroups[mediaType]) {
5481 for (var labelKey in master.mediaGroups[mediaType][groupKey]) {
5482 var mediaProperties = master.mediaGroups[mediaType][groupKey][labelKey];
5483 callback(mediaProperties, mediaType, groupKey, labelKey);
5484 }
5485 }
5486 });
5487 };
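  // Minimal sketch of iterating the parsed media groups, where `master` is a
  // hypothetical parsed manifest shaped like mediaGroups[TYPE][GROUP][LABEL]:
  //
  //   forEachMediaGroup(master, ['AUDIO', 'SUBTITLES'], function (properties, type, group, label) {
  //     console.log(type + '/' + group + '/' + label, properties.language);
  //   });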
5488
5489 /*! @name mpd-parser @version 0.22.1 @license Apache-2.0 */
5490
5491 var isObject = function isObject(obj) {
5492 return !!obj && typeof obj === 'object';
5493 };
5494
5495 var merge = function merge() {
5496 for (var _len = arguments.length, objects = new Array(_len), _key = 0; _key < _len; _key++) {
5497 objects[_key] = arguments[_key];
5498 }
5499
5500 return objects.reduce(function (result, source) {
5501 if (typeof source !== 'object') {
5502 return result;
5503 }
5504
5505 Object.keys(source).forEach(function (key) {
5506 if (Array.isArray(result[key]) && Array.isArray(source[key])) {
5507 result[key] = result[key].concat(source[key]);
5508 } else if (isObject(result[key]) && isObject(source[key])) {
5509 result[key] = merge(result[key], source[key]);
5510 } else {
5511 result[key] = source[key];
5512 }
5513 });
5514 return result;
5515 }, {});
5516 };
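  // Behavior sketch for the deep merge above: arrays concatenate, plain
  // objects merge recursively, and any other value is overwritten by the
  // right-most source.
  //
  //   merge({ a: [1], b: { c: 1 }, d: 1 }, { a: [2], b: { e: 2 }, d: 2 });
  //   // => { a: [1, 2], b: { c: 1, e: 2 }, d: 2 }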
5517
5518 var values = function values(o) {
5519 return Object.keys(o).map(function (k) {
5520 return o[k];
5521 });
5522 };
5523
5524 var range = function range(start, end) {
5525 var result = [];
5526
5527 for (var i = start; i < end; i++) {
5528 result.push(i);
5529 }
5530
5531 return result;
5532 };
5533
5534 var flatten = function flatten(lists) {
5535 return lists.reduce(function (x, y) {
5536 return x.concat(y);
5537 }, []);
5538 };
5539
5540 var from = function from(list) {
5541 if (!list.length) {
5542 return [];
5543 }
5544
5545 var result = [];
5546
5547 for (var i = 0; i < list.length; i++) {
5548 result.push(list[i]);
5549 }
5550
5551 return result;
5552 };
5553
5554 var findIndexes = function findIndexes(l, key) {
5555 return l.reduce(function (a, e, i) {
5556 if (e[key]) {
5557 a.push(i);
5558 }
5559
5560 return a;
5561 }, []);
5562 };
5563 /**
5564 * Returns the first index that satisfies the matching function, or -1 if not found.
5565 *
5566 * Only necessary because of IE11 support.
5567 *
5568 * @param {Array} list - the list to search through
5569 * @param {Function} matchingFunction - the matching function
5570 *
5571 * @return {number} the matching index or -1 if not found
5572 */
5573
5574
5575 var findIndex = function findIndex(list, matchingFunction) {
5576 for (var i = 0; i < list.length; i++) {
5577 if (matchingFunction(list[i])) {
5578 return i;
5579 }
5580 }
5581
5582 return -1;
5583 };
5584 /**
5585 * Returns a union of the included lists provided each element can be identified by a key.
5586 *
5587 * @param {Array} list - list of lists to get the union of
5588 * @param {Function} keyFunction - the function to use as a key for each element
5589 *
5590 * @return {Array} the union of the arrays
5591 */
5592
5593
5594 var union = function union(lists, keyFunction) {
5595 return values(lists.reduce(function (acc, list) {
5596 list.forEach(function (el) {
5597 acc[keyFunction(el)] = el;
5598 });
5599 return acc;
5600 }, {}));
5601 };
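  // Behavior sketch: later lists win on key collisions, and the result
  // follows key insertion order.
  //
  //   union([[{ id: 'a', v: 1 }], [{ id: 'a', v: 2 }, { id: 'b', v: 3 }]], function (el) {
  //     return el.id;
  //   });
  //   // => [{ id: 'a', v: 2 }, { id: 'b', v: 3 }]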
5602
5603 var errors = {
5604 INVALID_NUMBER_OF_PERIOD: 'INVALID_NUMBER_OF_PERIOD',
5605 DASH_EMPTY_MANIFEST: 'DASH_EMPTY_MANIFEST',
5606 DASH_INVALID_XML: 'DASH_INVALID_XML',
5607 NO_BASE_URL: 'NO_BASE_URL',
5608 MISSING_SEGMENT_INFORMATION: 'MISSING_SEGMENT_INFORMATION',
5609 SEGMENT_TIME_UNSPECIFIED: 'SEGMENT_TIME_UNSPECIFIED',
5610 UNSUPPORTED_UTC_TIMING_SCHEME: 'UNSUPPORTED_UTC_TIMING_SCHEME'
5611 };
5612 /**
5613 * @typedef {Object} SingleUri
5614 * @property {string} uri - relative location of segment
5615 * @property {string} resolvedUri - resolved location of segment
5616 * @property {Object} byterange - Object containing information on how to make byte range
5617 * requests following byte-range-spec per RFC2616.
5618 * @property {number} byterange.length - length of range request
5619 * @property {number} byterange.offset - byte offset of range request
5620 *
5621 * @see https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.35.1
5622 */
5623
5624 /**
5625 * Converts a URLType node (5.3.9.2.3 Table 13) to a segment object
5626 * that conforms to how m3u8-parser is structured
5627 *
5628 * @see https://github.com/videojs/m3u8-parser
5629 *
5630 * @param {string} baseUrl - baseUrl provided by <BaseUrl> nodes
5631 * @param {string} source - source url for segment
5632 * @param {string} range - optional range used for range calls,
5633 * follows RFC 2616, Clause 14.35.1
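 * @param {string} indexRange - optional index range used for sidx requests,
 * also following RFC 2616, Clause 14.35.1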
5634 * @return {SingleUri} full segment information transformed into a format similar
5635 * to m3u8-parser
5636 */
5637
5638 var urlTypeToSegment = function urlTypeToSegment(_ref) {
5639 var _ref$baseUrl = _ref.baseUrl,
5640 baseUrl = _ref$baseUrl === void 0 ? '' : _ref$baseUrl,
5641 _ref$source = _ref.source,
5642 source = _ref$source === void 0 ? '' : _ref$source,
5643 _ref$range = _ref.range,
5644 range = _ref$range === void 0 ? '' : _ref$range,
5645 _ref$indexRange = _ref.indexRange,
5646 indexRange = _ref$indexRange === void 0 ? '' : _ref$indexRange;
5647 var segment = {
5648 uri: source,
5649 resolvedUri: resolveUrl$1(baseUrl || '', source)
5650 };
5651
5652 if (range || indexRange) {
5653 var rangeStr = range ? range : indexRange;
5654 var ranges = rangeStr.split('-'); // default to parsing this as a BigInt if possible
5655
5656 var startRange = window.BigInt ? window.BigInt(ranges[0]) : parseInt(ranges[0], 10);
5657 var endRange = window.BigInt ? window.BigInt(ranges[1]) : parseInt(ranges[1], 10); // convert back to a number if less than MAX_SAFE_INTEGER
5658
5659 if (startRange < Number.MAX_SAFE_INTEGER && typeof startRange === 'bigint') {
5660 startRange = Number(startRange);
5661 }
5662
5663 if (endRange < Number.MAX_SAFE_INTEGER && typeof endRange === 'bigint') {
5664 endRange = Number(endRange);
5665 }
5666
5667 var length;
5668
5669 if (typeof endRange === 'bigint' || typeof startRange === 'bigint') {
5670 length = window.BigInt(endRange) - window.BigInt(startRange) + window.BigInt(1);
5671 } else {
5672 length = endRange - startRange + 1;
5673 }
5674
5675 if (typeof length === 'bigint' && length < Number.MAX_SAFE_INTEGER) {
5676 length = Number(length);
5677 } // byterange should be inclusive according to
5678 // RFC 2616, Clause 14.35.1
5679
5680
5681 segment.byterange = {
5682 length: length,
5683 offset: startRange
5684 };
5685 }
5686
5687 return segment;
5688 };
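  // Worked sketch (hypothetical URL): an inclusive range of '0-599' becomes a
  // 600 byte byterange starting at offset 0, per RFC 2616, Clause 14.35.1.
  //
  //   urlTypeToSegment({ baseUrl: 'https://example.com/', source: 'seg.mp4', range: '0-599' });
  //   // => { uri: 'seg.mp4',
  //   //      resolvedUri: 'https://example.com/seg.mp4',
  //   //      byterange: { length: 600, offset: 0 } }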
5689
5690 var byteRangeToString = function byteRangeToString(byterange) {
5691 // `endRange` is one less than `offset + length` because the HTTP range
5692 // header uses inclusive ranges
5693 var endRange;
5694
5695 if (typeof byterange.offset === 'bigint' || typeof byterange.length === 'bigint') {
5696 endRange = window.BigInt(byterange.offset) + window.BigInt(byterange.length) - window.BigInt(1);
5697 } else {
5698 endRange = byterange.offset + byterange.length - 1;
5699 }
5700
5701 return byterange.offset + "-" + endRange;
5702 };
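  // Worked sketch of the inclusive-range math above:
  //
  //   byteRangeToString({ offset: 100, length: 50 }); // => '100-149'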
5703 /**
5704 * parse the end number attribute that can be a string,
5705 * number, or undefined.
5706 *
5707 * @param {string|number|undefined} endNumber
5708 * The end number attribute.
5709 *
5710 * @return {number|null}
5711 * The result of parsing the end number.
5712 */
5713
5714
5715 var parseEndNumber = function parseEndNumber(endNumber) {
5716 if (endNumber && typeof endNumber !== 'number') {
5717 endNumber = parseInt(endNumber, 10);
5718 }
5719
5720 if (isNaN(endNumber)) {
5721 return null;
5722 }
5723
5724 return endNumber;
5725 };
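  // Behavior sketch:
  //
  //   parseEndNumber('10');      // => 10
  //   parseEndNumber(10);        // => 10
  //   parseEndNumber('foo');     // => null
  //   parseEndNumber(undefined); // => null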
5726 /**
5727 * Functions for calculating the range of available segments in static and dynamic
5728 * manifests.
5729 */
5730
5731
5732 var segmentRange = {
5733 /**
5734 * Returns the entire range of available segments for a static MPD
5735 *
5736 * @param {Object} attributes
5737 * Inherited MPD attributes
5738 * @return {{ start: number, end: number }}
5739 * The start and end numbers for available segments
5740 */
5741 static: function _static(attributes) {
5742 var duration = attributes.duration,
5743 _attributes$timescale = attributes.timescale,
5744 timescale = _attributes$timescale === void 0 ? 1 : _attributes$timescale,
5745 sourceDuration = attributes.sourceDuration,
5746 periodDuration = attributes.periodDuration;
5747 var endNumber = parseEndNumber(attributes.endNumber);
5748 var segmentDuration = duration / timescale;
5749
5750 if (typeof endNumber === 'number') {
5751 return {
5752 start: 0,
5753 end: endNumber
5754 };
5755 }
5756
5757 if (typeof periodDuration === 'number') {
5758 return {
5759 start: 0,
5760 end: periodDuration / segmentDuration
5761 };
5762 }
5763
5764 return {
5765 start: 0,
5766 end: sourceDuration / segmentDuration
5767 };
5768 },
5769
5770 /**
5771 * Returns the current live window range of available segments for a dynamic MPD
5772 *
5773 * @param {Object} attributes
5774 * Inherited MPD attributes
5775 * @return {{ start: number, end: number }}
5776 * The start and end numbers for available segments
5777 */
5778 dynamic: function dynamic(attributes) {
5779 var NOW = attributes.NOW,
5780 clientOffset = attributes.clientOffset,
5781 availabilityStartTime = attributes.availabilityStartTime,
5782 _attributes$timescale2 = attributes.timescale,
5783 timescale = _attributes$timescale2 === void 0 ? 1 : _attributes$timescale2,
5784 duration = attributes.duration,
5785 _attributes$periodSta = attributes.periodStart,
5786 periodStart = _attributes$periodSta === void 0 ? 0 : _attributes$periodSta,
5787 _attributes$minimumUp = attributes.minimumUpdatePeriod,
5788 minimumUpdatePeriod = _attributes$minimumUp === void 0 ? 0 : _attributes$minimumUp,
5789 _attributes$timeShift = attributes.timeShiftBufferDepth,
5790 timeShiftBufferDepth = _attributes$timeShift === void 0 ? Infinity : _attributes$timeShift;
5791 var endNumber = parseEndNumber(attributes.endNumber); // clientOffset is passed in at the top level of mpd-parser and is an offset calculated
5792 // after retrieving UTC server time.
5793
5794 var now = (NOW + clientOffset) / 1000; // WC stands for Wall Clock.
5795 // Convert the period start time to EPOCH.
5796
5797 var periodStartWC = availabilityStartTime + periodStart; // Period end in EPOCH is manifest's retrieval time + time until next update.
5798
5799 var periodEndWC = now + minimumUpdatePeriod;
5800 var periodDuration = periodEndWC - periodStartWC;
5801 var segmentCount = Math.ceil(periodDuration * timescale / duration);
5802 var availableStart = Math.floor((now - periodStartWC - timeShiftBufferDepth) * timescale / duration);
5803 var availableEnd = Math.floor((now - periodStartWC) * timescale / duration);
5804 return {
5805 start: Math.max(0, availableStart),
5806 end: typeof endNumber === 'number' ? endNumber : Math.min(segmentCount, availableEnd)
5807 };
5808 }
5809 };
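  // Worked sketch of the dynamic window, with hypothetical numbers:
  // NOW = 1000000 ms and clientOffset = 0 give now = 1000 s; with
  // availabilityStartTime = 0 and the periodStart/minimumUpdatePeriod
  // defaults, periodDuration = 1000 s. At timescale = 1 and duration = 4
  // (4 second segments), segmentCount = ceil(1000 / 4) = 250,
  // availableEnd = floor(1000 / 4) = 250, and a timeShiftBufferDepth of 60
  // gives availableStart = floor((1000 - 60) / 4) = 235.
  //
  //   segmentRange.dynamic({
  //     NOW: 1000000, clientOffset: 0, availabilityStartTime: 0,
  //     timescale: 1, duration: 4, timeShiftBufferDepth: 60
  //   });
  //   // => { start: 235, end: 250 }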
5810 /**
5811 * Maps a range of numbers to objects with information needed to build the corresponding
5812 * segment list
5813 *
5814 * @name toSegmentsCallback
5815 * @function
5816 * @param {number} number
5817 * Number of the segment
5818 * @param {number} index
5819 * Index of the number in the range list
5820 * @return {{ number: Number, duration: Number, timeline: Number, time: Number }}
5821 * Object with segment timing and duration info
5822 */
5823
5824 /**
5825 * Returns a callback for Array.prototype.map for mapping a range of numbers to
5826 * information needed to build the segment list.
5827 *
5828 * @param {Object} attributes
5829 * Inherited MPD attributes
5830 * @return {toSegmentsCallback}
5831 * Callback map function
5832 */
5833
5834 var toSegments = function toSegments(attributes) {
5835 return function (number) {
5836 var duration = attributes.duration,
5837 _attributes$timescale3 = attributes.timescale,
5838 timescale = _attributes$timescale3 === void 0 ? 1 : _attributes$timescale3,
5839 periodStart = attributes.periodStart,
5840 _attributes$startNumb = attributes.startNumber,
5841 startNumber = _attributes$startNumb === void 0 ? 1 : _attributes$startNumb;
5842 return {
5843 number: startNumber + number,
5844 duration: duration / timescale,
5845 timeline: periodStart,
5846 time: number * duration
5847 };
5848 };
5849 };
5850 /**
5851 * Returns a list of objects containing segment timing and duration info used for
5852 * building the list of segments. This uses the @duration attribute specified
5853 * in the MPD manifest to derive the range of segments.
5854 *
5855 * @param {Object} attributes
5856 * Inherited MPD attributes
5857 * @return {{number: number, duration: number, time: number, timeline: number}[]}
5858 * List of Objects with segment timing and duration info
5859 */
5860
5861
5862 var parseByDuration = function parseByDuration(attributes) {
5863 var type = attributes.type,
5864 duration = attributes.duration,
5865 _attributes$timescale4 = attributes.timescale,
5866 timescale = _attributes$timescale4 === void 0 ? 1 : _attributes$timescale4,
5867 periodDuration = attributes.periodDuration,
5868 sourceDuration = attributes.sourceDuration;
5869
5870 var _segmentRange$type = segmentRange[type](attributes),
5871 start = _segmentRange$type.start,
5872 end = _segmentRange$type.end;
5873
5874 var segments = range(start, end).map(toSegments(attributes));
5875
5876 if (type === 'static') {
5877 var index = segments.length - 1; // section is either a period or the full source
5878
5879 var sectionDuration = typeof periodDuration === 'number' ? periodDuration : sourceDuration; // final segment may be less than full segment duration
5880
5881 segments[index].duration = sectionDuration - duration / timescale * index;
5882 }
5883
5884 return segments;
5885 };
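  // Worked sketch of the final-segment trim above: a 10 second static source
  // cut into 4 second segments yields three segments, the last shortened to
  // 10 - 4 * 2 = 2 seconds.
  //
  //   parseByDuration({ type: 'static', duration: 4, timescale: 1, sourceDuration: 10, periodStart: 0 });
  //   // => [{ number: 1, duration: 4, timeline: 0, time: 0 },
  //   //     { number: 2, duration: 4, timeline: 0, time: 4 },
  //   //     { number: 3, duration: 2, timeline: 0, time: 8 }]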
5886 /**
5887 * Translates SegmentBase into a set of segments.
5888 * (DASH SPEC Section 5.3.9.3.2) contains a set of <SegmentURL> nodes. Each
5889 * node should be translated into a segment.
5890 *
5891 * @param {Object} attributes
5892 * Object containing all inherited attributes from parent elements with attribute
5893 * names as keys
5894 * @return {Object.<Array>} list of segments
5895 */
5896
5897
5898 var segmentsFromBase = function segmentsFromBase(attributes) {
5899 var baseUrl = attributes.baseUrl,
5900 _attributes$initializ = attributes.initialization,
5901 initialization = _attributes$initializ === void 0 ? {} : _attributes$initializ,
5902 sourceDuration = attributes.sourceDuration,
5903 _attributes$indexRang = attributes.indexRange,
5904 indexRange = _attributes$indexRang === void 0 ? '' : _attributes$indexRang,
5905 periodStart = attributes.periodStart,
5906 presentationTime = attributes.presentationTime,
5907 _attributes$number = attributes.number,
5908 number = _attributes$number === void 0 ? 0 : _attributes$number,
5909 duration = attributes.duration; // base url is required for SegmentBase to work, per spec (Section 5.3.9.2.1)
5910
5911 if (!baseUrl) {
5912 throw new Error(errors.NO_BASE_URL);
5913 }
5914
5915 var initSegment = urlTypeToSegment({
5916 baseUrl: baseUrl,
5917 source: initialization.sourceURL,
5918 range: initialization.range
5919 });
5920 var segment = urlTypeToSegment({
5921 baseUrl: baseUrl,
5922 source: baseUrl,
5923 indexRange: indexRange
5924 });
5925 segment.map = initSegment; // If there is a duration, use it, otherwise use the given duration of the source
5926 // (since SegmentBase is only for one total segment)
5927
5928 if (duration) {
5929 var segmentTimeInfo = parseByDuration(attributes);
5930
5931 if (segmentTimeInfo.length) {
5932 segment.duration = segmentTimeInfo[0].duration;
5933 segment.timeline = segmentTimeInfo[0].timeline;
5934 }
5935 } else if (sourceDuration) {
5936 segment.duration = sourceDuration;
5937 segment.timeline = periodStart;
5938 } // If presentation time is provided, these segments are being generated by SIDX
5939 // references, and should use the time provided. For the general case of SegmentBase,
5940 // there should only be one segment in the period, so its presentation time is the same
5941 // as its period start.
5942
5943
5944 segment.presentationTime = presentationTime || periodStart;
5945 segment.number = number;
5946 return [segment];
5947 };
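  // Minimal sketch (hypothetical URL): a single-file representation whose
  // init data and segment index live in byte ranges of one mp4.
  //
  //   segmentsFromBase({
  //     baseUrl: 'https://example.com/video.mp4',
  //     initialization: { range: '0-599' },
  //     indexRange: '600-1199',
  //     sourceDuration: 10,
  //     periodStart: 0
  //   });
  //   // => one segment covering the whole source, with
  //   //    map.byterange = { length: 600, offset: 0 } (init) and
  //   //    byterange = { length: 600, offset: 600 } (the sidx index)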
5948 /**
5949 * Given a playlist, a sidx box, and a baseUrl, update the segment list of the playlist
5950 * according to the sidx information given.
5951 *
5952 * playlist.sidx has metadata about the sidx, whereas the sidx param
5953 * is the parsed sidx box itself.
5954 *
5955 * @param {Object} playlist the playlist to update the sidx information for
5956 * @param {Object} sidx the parsed sidx box
5957 * @return {Object} the playlist object with the updated sidx information
5958 */
5959
5960
5961 var addSidxSegmentsToPlaylist$1 = function addSidxSegmentsToPlaylist(playlist, sidx, baseUrl) {
5962 // Retain init segment information
5963 var initSegment = playlist.sidx.map ? playlist.sidx.map : null; // Retain source duration from initial main manifest parsing
5964
5965 var sourceDuration = playlist.sidx.duration; // Retain source timeline
5966
5967 var timeline = playlist.timeline || 0;
5968 var sidxByteRange = playlist.sidx.byterange;
5969 var sidxEnd = sidxByteRange.offset + sidxByteRange.length; // Retain timescale of the parsed sidx
5970
5971 var timescale = sidx.timescale; // referenceType 1 refers to other sidx boxes
5972
5973 var mediaReferences = sidx.references.filter(function (r) {
5974 return r.referenceType !== 1;
5975 });
5976 var segments = [];
5977 var type = playlist.endList ? 'static' : 'dynamic';
5978 var periodStart = playlist.sidx.timeline;
5979 var presentationTime = periodStart;
5980 var number = playlist.mediaSequence || 0; // firstOffset is the offset from the end of the sidx box
5981
5982 var startIndex; // eslint-disable-next-line
5983
5984 if (typeof sidx.firstOffset === 'bigint') {
5985 startIndex = window.BigInt(sidxEnd) + sidx.firstOffset;
5986 } else {
5987 startIndex = sidxEnd + sidx.firstOffset;
5988 }
5989
5990 for (var i = 0; i < mediaReferences.length; i++) {
5991 var reference = mediaReferences[i]; // size of the referenced (sub)segment
5992
5993 var size = reference.referencedSize; // duration of the referenced (sub)segment, in the timescale
5994 // this will be converted to seconds when generating segments
5995
5996 var duration = reference.subsegmentDuration; // should be an inclusive range
5997
5998 var endIndex = void 0; // eslint-disable-next-line
5999
6000 if (typeof startIndex === 'bigint') {
6001 endIndex = startIndex + window.BigInt(size) - window.BigInt(1);
6002 } else {
6003 endIndex = startIndex + size - 1;
6004 }
6005
6006 var indexRange = startIndex + "-" + endIndex;
6007 var attributes = {
6008 baseUrl: baseUrl,
6009 timescale: timescale,
6010 timeline: timeline,
6011 periodStart: periodStart,
6012 presentationTime: presentationTime,
6013 number: number,
6014 duration: duration,
6015 sourceDuration: sourceDuration,
6016 indexRange: indexRange,
6017 type: type
6018 };
6019 var segment = segmentsFromBase(attributes)[0];
6020
6021 if (initSegment) {
6022 segment.map = initSegment;
6023 }
6024
6025 segments.push(segment);
6026
6027 if (typeof startIndex === 'bigint') {
6028 startIndex += window.BigInt(size);
6029 } else {
6030 startIndex += size;
6031 }
6032
6033 presentationTime += duration / timescale;
6034 number++;
6035 }
6036
6037 playlist.segments = segments;
6038 return playlist;
6039 };
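  // Worked sketch of the byte walk above: for a sidx at byterange
  // { offset: 786, length: 100 } with firstOffset 0, media data begins at
  // byte 786 + 100 + 0 = 886, so a single reference of referencedSize 1000
  // maps to the inclusive indexRange '886-1885'; its subsegmentDuration of
  // 360000 at a timescale of 90000 advances presentationTime by 4 seconds.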
6040
6041 var SUPPORTED_MEDIA_TYPES = ['AUDIO', 'SUBTITLES']; // allow one 60fps frame as leniency (arbitrarily chosen)
6042
6043 var TIME_FUDGE = 1 / 60;
6044 /**
6045 * Given a list of timelineStarts, combines, dedupes, and sorts them.
6046 *
6047 * @param {TimelineStart[]} timelineStarts - list of timeline starts
6048 *
6049 * @return {TimelineStart[]} the combined and deduped timeline starts
6050 */
6051
6052 var getUniqueTimelineStarts = function getUniqueTimelineStarts(timelineStarts) {
6053 return union(timelineStarts, function (_ref) {
6054 var timeline = _ref.timeline;
6055 return timeline;
6056 }).sort(function (a, b) {
6057 return a.timeline > b.timeline ? 1 : -1;
6058 });
6059 };
6060 /**
6061 * Finds the playlist with the matching NAME attribute.
6062 *
6063 * @param {Array} playlists - playlists to search through
6064 * @param {string} name - the NAME attribute to search for
6065 *
6066 * @return {Object|null} the matching playlist object, or null
6067 */
6068
6069
6070 var findPlaylistWithName = function findPlaylistWithName(playlists, name) {
6071 for (var i = 0; i < playlists.length; i++) {
6072 if (playlists[i].attributes.NAME === name) {
6073 return playlists[i];
6074 }
6075 }
6076
6077 return null;
6078 };
6079 /**
6080 * Gets a flattened array of media group playlists.
6081 *
6082 * @param {Object} manifest - the main manifest object
6083 *
6084 * @return {Array} the media group playlists
6085 */
6086
6087
6088 var getMediaGroupPlaylists = function getMediaGroupPlaylists(manifest) {
6089 var mediaGroupPlaylists = [];
6090 forEachMediaGroup(manifest, SUPPORTED_MEDIA_TYPES, function (properties, type, group, label) {
6091 mediaGroupPlaylists = mediaGroupPlaylists.concat(properties.playlists || []);
6092 });
6093 return mediaGroupPlaylists;
6094 };
6095 /**
6096 * Updates the playlist's media sequence numbers.
6097 *
6098 * @param {Object} config - options object
6099 * @param {Object} config.playlist - the playlist to update
6100 * @param {number} config.mediaSequence - the mediaSequence number to start with
6101 */
6102
6103
6104 var updateMediaSequenceForPlaylist = function updateMediaSequenceForPlaylist(_ref2) {
6105 var playlist = _ref2.playlist,
6106 mediaSequence = _ref2.mediaSequence;
6107 playlist.mediaSequence = mediaSequence;
6108 playlist.segments.forEach(function (segment, index) {
6109 segment.number = playlist.mediaSequence + index;
6110 });
6111 };
6112 /**
6113 * Updates the media and discontinuity sequence numbers of newPlaylists given oldPlaylists
6114 * and a complete list of timeline starts.
6115 *
6116 * If no matching playlist is found, only the discontinuity sequence number of the playlist
6117 * will be updated.
6118 *
6119 * Since early available timelines are not supported, at least one segment must be present.
6120 *
6121 * @param {Object} config - options object
6122 * @param {Object[]} oldPlaylists - the old playlists to use as a reference
6123 * @param {Object[]} newPlaylists - the new playlists to update
6124 * @param {Object} timelineStarts - all timelineStarts seen in the stream to this point
6125 */
6126
6127
6128 var updateSequenceNumbers = function updateSequenceNumbers(_ref3) {
6129 var oldPlaylists = _ref3.oldPlaylists,
6130 newPlaylists = _ref3.newPlaylists,
6131 timelineStarts = _ref3.timelineStarts;
6132 newPlaylists.forEach(function (playlist) {
6133 playlist.discontinuitySequence = findIndex(timelineStarts, function (_ref4) {
6134 var timeline = _ref4.timeline;
6135 return timeline === playlist.timeline;
6136 }); // Playlist NAMEs come from DASH Representation IDs, which are mandatory
6137 // (see ISO_23009-1-2012 5.3.5.2).
6138 //
6139 // If the same Representation existed in a prior Period, it will retain the same NAME.
6140
6141 var oldPlaylist = findPlaylistWithName(oldPlaylists, playlist.attributes.NAME);
6142
6143 if (!oldPlaylist) {
6144 // Since this is a new playlist, the media sequence values can start from 0 without
6145 // consequence.
6146 return;
6147 } // TODO better support for live SIDX
6148 //
6149 // As of this writing, mpd-parser does not support multiperiod SIDX (in live or VOD).
6150 // This is evident by a playlist only having a single SIDX reference. In a multiperiod
6151 // playlist there would need to be multiple SIDX references. In addition, live SIDX is
6152 // not supported when the SIDX properties change on refreshes.
6153 //
6154 // In the future, if support needs to be added, the merging logic here can be called
6155 // after SIDX references are resolved. For now, exit early to prevent exceptions being
6156 // thrown due to undefined references.
6157
6158
6159 if (playlist.sidx) {
6160 return;
6161 } // Since we don't yet support early available timelines, we don't need to support
6162 // playlists with no segments.
6163
6164
6165 var firstNewSegment = playlist.segments[0];
6166 var oldMatchingSegmentIndex = findIndex(oldPlaylist.segments, function (oldSegment) {
6167 return Math.abs(oldSegment.presentationTime - firstNewSegment.presentationTime) < TIME_FUDGE;
6168 }); // No matching segment from the old playlist means the entire playlist was refreshed.
6169 // In this case the media sequence should account for this update, and the new segments
6170 // should be marked as discontinuous from the prior content, since the last prior
6171 // timeline was removed.
6172
6173 if (oldMatchingSegmentIndex === -1) {
6174 updateMediaSequenceForPlaylist({
6175 playlist: playlist,
6176 mediaSequence: oldPlaylist.mediaSequence + oldPlaylist.segments.length
6177 });
6178 playlist.segments[0].discontinuity = true;
6179 playlist.discontinuityStarts.unshift(0); // No matching segment does not necessarily mean there's missing content.
6180 //
6181 // If the new playlist's timeline is the same as the last seen segment's timeline,
6182 // then a discontinuity can be added to identify that there's potentially missing
6183 // content. If there's no missing content, the discontinuity should still be rather
6184 // harmless. It's possible that if segment durations are accurate enough, that the
6185 // existence of a gap can be determined using the presentation times and durations,
6186 // but if the segment timing info is off, it may introduce more problems than simply
6187 // adding the discontinuity.
6188 //
6189 // If the new playlist's timeline is different from the last seen segment's timeline,
6190 // then a discontinuity can be added to identify that this is the first seen segment
6191 // of a new timeline. However, the logic at the start of this function that
6192 // determined the discontinuity sequence by timeline index is now off by one (the
6193 // discontinuity of the newest timeline hasn't yet fallen off the manifest...since
6194 // we added it), so the discontinuity sequence must be decremented.
6195 //
6196 // A period may also have a duration of zero, so the case of no segments is handled
6197 // here even though we don't yet support early available periods.
6198
6199 if (!oldPlaylist.segments.length && playlist.timeline > oldPlaylist.timeline || oldPlaylist.segments.length && playlist.timeline > oldPlaylist.segments[oldPlaylist.segments.length - 1].timeline) {
6200 playlist.discontinuitySequence--;
6201 }
6202
6203 return;
6204 } // If the first segment matched with a prior segment on a discontinuity (it's matching
6205 // on the first segment of a period), then the discontinuitySequence shouldn't be the
6206 // timeline's matching one, but instead should be the one prior, and the first segment
6207 // of the new manifest should be marked with a discontinuity.
6208 //
6209 // The reason for this special case is that discontinuity sequence shows how many
6210 // discontinuities have fallen off of the playlist, and discontinuities are marked on
6211 // the first segment of a new "timeline." Because of this, while DASH will retain that
6212 // Period while the "timeline" exists, HLS keeps track of it via the discontinuity
6213 // sequence, and that first segment is an indicator, but can be removed before that
6214 // timeline is gone.
6215
6216
6217 var oldMatchingSegment = oldPlaylist.segments[oldMatchingSegmentIndex];
6218
6219 if (oldMatchingSegment.discontinuity && !firstNewSegment.discontinuity) {
6220 firstNewSegment.discontinuity = true;
6221 playlist.discontinuityStarts.unshift(0);
6222 playlist.discontinuitySequence--;
6223 }
6224
6225 updateMediaSequenceForPlaylist({
6226 playlist: playlist,
6227 mediaSequence: oldPlaylist.segments[oldMatchingSegmentIndex].number
6228 });
6229 });
6230 };
6231 /**
6232 * Given an old parsed manifest object and a new parsed manifest object, updates the
6233 * sequence and timing values within the new manifest to ensure that it lines up with the
6234 * old.
6235 *
6236 * @param {Array} oldManifest - the old main manifest object
6237 * @param {Array} newManifest - the new main manifest object
6238 *
6239 * @return {Object} the updated new manifest object
6240 */
6241
6242
6243 var positionManifestOnTimeline = function positionManifestOnTimeline(_ref5) {
6244 var oldManifest = _ref5.oldManifest,
6245 newManifest = _ref5.newManifest; // Starting from v4.1.2 of the IOP, section 4.4.3.3 states:
6246 //
6247 // "MPD@availabilityStartTime and Period@start shall not be changed over MPD updates."
6248 //
6249 // This was added from https://github.com/Dash-Industry-Forum/DASH-IF-IOP/issues/160
6250 //
6251 // Because of this change, and the difficulty of supporting periods with changing start
6252 // times, periods with changing start times are not supported. This makes the logic much
6253 // simpler, since periods with the same start time can be considered the same period
6254 // across refreshes.
6255 //
6256 // To give an example as to the difficulty of handling periods where the start time may
6257 // change, if a single period manifest is refreshed with another manifest with a single
6258 // period, and both the start and end times are increased, then the only way to determine
6259 // if it's a new period or an old one that has changed is to look through the segments of
6260 // each playlist and determine the presentation time bounds to find a match. In addition,
6261 // if the period start changed to exceed the old period end, then there would be no
6262 // match, and it would not be possible to determine whether the refreshed period is a new
6263 // one or the old one.
6264
6265 var oldPlaylists = oldManifest.playlists.concat(getMediaGroupPlaylists(oldManifest));
6266 var newPlaylists = newManifest.playlists.concat(getMediaGroupPlaylists(newManifest)); // Save all seen timelineStarts to the new manifest. Although this potentially means that
6267 // there's a "memory leak" in that it will never stop growing, in reality, only a couple
6268 // of properties are saved for each seen Period. Even long running live streams won't
6269 // generate too many Periods, unless the stream is watched for decades. In the future,
6270 // this can be optimized by mapping to discontinuity sequence numbers for each timeline,
6271 // but it may not become an issue, and the additional info can be useful for debugging.
6272
6273 newManifest.timelineStarts = getUniqueTimelineStarts([oldManifest.timelineStarts, newManifest.timelineStarts]);
6274 updateSequenceNumbers({
6275 oldPlaylists: oldPlaylists,
6276 newPlaylists: newPlaylists,
6277 timelineStarts: newManifest.timelineStarts
6278 });
6279 return newManifest;
6280 };
6281
6282 var generateSidxKey = function generateSidxKey(sidx) {
6283 return sidx && sidx.uri + '-' + byteRangeToString(sidx.byterange);
6284 };
6285
6286 var mergeDiscontiguousPlaylists = function mergeDiscontiguousPlaylists(playlists) {
6287 var mergedPlaylists = values(playlists.reduce(function (acc, playlist) {
6288 // assuming playlist IDs are the same across periods
6289 // TODO: handle multiperiod where representation sets are not the same
6290 // across periods
6291 var name = playlist.attributes.id + (playlist.attributes.lang || '');
6292
6293 if (!acc[name]) {
6294 // First Period
6295 acc[name] = playlist;
6296 acc[name].attributes.timelineStarts = [];
6297 } else {
6298 // Subsequent Periods
6299 if (playlist.segments) {
6300 var _acc$name$segments; // first segment of subsequent periods signal a discontinuity
6301
6302
6303 if (playlist.segments[0]) {
6304 playlist.segments[0].discontinuity = true;
6305 }
6306
6307 (_acc$name$segments = acc[name].segments).push.apply(_acc$name$segments, playlist.segments);
6308 } // bubble up contentProtection, this assumes all DRM content
6309 // has the same contentProtection
6310
6311
6312 if (playlist.attributes.contentProtection) {
6313 acc[name].attributes.contentProtection = playlist.attributes.contentProtection;
6314 }
6315 }
6316
6317 acc[name].attributes.timelineStarts.push({
6318 // Although they represent the same number, it's important to have both to make it
6319 // compatible with HLS potentially having a similar attribute.
6320 start: playlist.attributes.periodStart,
6321 timeline: playlist.attributes.periodStart
6322 });
6323 return acc;
6324 }, {}));
6325 return mergedPlaylists.map(function (playlist) {
6326 playlist.discontinuityStarts = findIndexes(playlist.segments || [], 'discontinuity');
6327 return playlist;
6328 });
6329 };
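  // Behavior sketch: two periods of the same representation (matching
  // attributes.id, and lang if present) collapse into one playlist, with the
  // first segment of the second period flagged as a discontinuity. The
  // playlist names here are hypothetical.
  //
  //   var merged = mergeDiscontiguousPlaylists([periodOnePlaylist, periodTwoPlaylist]);
  //   // merged[0].segments            -> period one segments followed by period two segments
  //   // merged[0].discontinuityStarts -> [index of period two's first segment]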
6330
6331 var addSidxSegmentsToPlaylist = function addSidxSegmentsToPlaylist(playlist, sidxMapping) {
6332 var sidxKey = generateSidxKey(playlist.sidx);
6333 var sidxMatch = sidxKey && sidxMapping[sidxKey] && sidxMapping[sidxKey].sidx;
6334
6335 if (sidxMatch) {
6336 addSidxSegmentsToPlaylist$1(playlist, sidxMatch, playlist.sidx.resolvedUri);
6337 }
6338
6339 return playlist;
6340 };
6341
6342 var addSidxSegmentsToPlaylists = function addSidxSegmentsToPlaylists(playlists, sidxMapping) {
6343 if (sidxMapping === void 0) {
6344 sidxMapping = {};
6345 }
6346
6347 if (!Object.keys(sidxMapping).length) {
6348 return playlists;
6349 }
6350
6351 for (var i in playlists) {
6352 playlists[i] = addSidxSegmentsToPlaylist(playlists[i], sidxMapping);
6353 }
6354
6355 return playlists;
6356 };
6357
6358 var formatAudioPlaylist = function formatAudioPlaylist(_ref, isAudioOnly) {
6359 var _attributes;
6360
6361 var attributes = _ref.attributes,
6362 segments = _ref.segments,
6363 sidx = _ref.sidx,
6364 mediaSequence = _ref.mediaSequence,
6365 discontinuitySequence = _ref.discontinuitySequence,
6366 discontinuityStarts = _ref.discontinuityStarts;
6367 var playlist = {
6368 attributes: (_attributes = {
6369 NAME: attributes.id,
6370 BANDWIDTH: attributes.bandwidth,
6371 CODECS: attributes.codecs
6372 }, _attributes['PROGRAM-ID'] = 1, _attributes),
6373 uri: '',
6374 endList: attributes.type === 'static',
6375 timeline: attributes.periodStart,
6376 resolvedUri: '',
6377 targetDuration: attributes.duration,
6378 discontinuitySequence: discontinuitySequence,
6379 discontinuityStarts: discontinuityStarts,
6380 timelineStarts: attributes.timelineStarts,
6381 mediaSequence: mediaSequence,
6382 segments: segments
6383 };
6384
6385 if (attributes.contentProtection) {
6386 playlist.contentProtection = attributes.contentProtection;
6387 }
6388
6389 if (sidx) {
6390 playlist.sidx = sidx;
6391 }
6392
6393 if (isAudioOnly) {
6394 playlist.attributes.AUDIO = 'audio';
6395 playlist.attributes.SUBTITLES = 'subs';
6396 }
6397
6398 return playlist;
6399 };
6400
6401 var formatVttPlaylist = function formatVttPlaylist(_ref2) {
6402 var _m3u8Attributes;
6403
6404 var attributes = _ref2.attributes,
6405 segments = _ref2.segments,
6406 mediaSequence = _ref2.mediaSequence,
6407 discontinuityStarts = _ref2.discontinuityStarts,
6408 discontinuitySequence = _ref2.discontinuitySequence;
6409
6410 if (typeof segments === 'undefined') {
6411 // vtt tracks may use single file in BaseURL
6412 segments = [{
6413 uri: attributes.baseUrl,
6414 timeline: attributes.periodStart,
6415 resolvedUri: attributes.baseUrl || '',
6416 duration: attributes.sourceDuration,
6417 number: 0
6418 }]; // targetDuration should be the same duration as the only segment
6419
6420 attributes.duration = attributes.sourceDuration;
6421 }
6422
6423 var m3u8Attributes = (_m3u8Attributes = {
6424 NAME: attributes.id,
6425 BANDWIDTH: attributes.bandwidth
6426 }, _m3u8Attributes['PROGRAM-ID'] = 1, _m3u8Attributes);
6427
6428 if (attributes.codecs) {
6429 m3u8Attributes.CODECS = attributes.codecs;
6430 }
6431
6432 return {
6433 attributes: m3u8Attributes,
6434 uri: '',
6435 endList: attributes.type === 'static',
6436 timeline: attributes.periodStart,
6437 resolvedUri: attributes.baseUrl || '',
6438 targetDuration: attributes.duration,
6439 timelineStarts: attributes.timelineStarts,
6440 discontinuityStarts: discontinuityStarts,
6441 discontinuitySequence: discontinuitySequence,
6442 mediaSequence: mediaSequence,
6443 segments: segments
6444 };
6445 };
6446
6447 var organizeAudioPlaylists = function organizeAudioPlaylists(playlists, sidxMapping, isAudioOnly) {
6448 if (sidxMapping === void 0) {
6449 sidxMapping = {};
6450 }
6451
6452 if (isAudioOnly === void 0) {
6453 isAudioOnly = false;
6454 }
6455
6456 var mainPlaylist;
6457 var formattedPlaylists = playlists.reduce(function (a, playlist) {
6458 var role = playlist.attributes.role && playlist.attributes.role.value || '';
6459 var language = playlist.attributes.lang || '';
6460 var label = playlist.attributes.label || 'main';
6461
6462 if (language && !playlist.attributes.label) {
6463 var roleLabel = role ? " (" + role + ")" : '';
6464 label = "" + playlist.attributes.lang + roleLabel;
6465 }
6466
6467 if (!a[label]) {
6468 a[label] = {
6469 language: language,
6470 autoselect: true,
6471 default: role === 'main',
6472 playlists: [],
6473 uri: ''
6474 };
6475 }
6476
6477 var formatted = addSidxSegmentsToPlaylist(formatAudioPlaylist(playlist, isAudioOnly), sidxMapping);
6478 a[label].playlists.push(formatted);
6479
6480 if (typeof mainPlaylist === 'undefined' && role === 'main') {
6481 mainPlaylist = playlist;
6482 mainPlaylist.default = true;
6483 }
6484
6485 return a;
6486 }, {}); // if no playlists have role "main", mark the first as main
6487
6488 if (!mainPlaylist) {
6489 var firstLabel = Object.keys(formattedPlaylists)[0];
6490 formattedPlaylists[firstLabel].default = true;
6491 }
6492
6493 return formattedPlaylists;
6494 };
6495
6496 var organizeVttPlaylists = function organizeVttPlaylists(playlists, sidxMapping) {
6497 if (sidxMapping === void 0) {
6498 sidxMapping = {};
6499 }
6500
6501 return playlists.reduce(function (a, playlist) {
6502 var label = playlist.attributes.lang || 'text';
6503
6504 if (!a[label]) {
6505 a[label] = {
6506 language: label,
6507 default: false,
6508 autoselect: false,
6509 playlists: [],
6510 uri: ''
6511 };
6512 }
6513
6514 a[label].playlists.push(addSidxSegmentsToPlaylist(formatVttPlaylist(playlist), sidxMapping));
6515 return a;
6516 }, {});
6517 };
6518
6519 var organizeCaptionServices = function organizeCaptionServices(captionServices) {
6520 return captionServices.reduce(function (svcObj, svc) {
6521 if (!svc) {
6522 return svcObj;
6523 }
6524
6525 svc.forEach(function (service) {
6526 var channel = service.channel,
6527 language = service.language;
6528 svcObj[language] = {
6529 autoselect: false,
6530 default: false,
6531 instreamId: channel,
6532 language: language
6533 };
6534
6535 if (service.hasOwnProperty('aspectRatio')) {
6536 svcObj[language].aspectRatio = service.aspectRatio;
6537 }
6538
6539 if (service.hasOwnProperty('easyReader')) {
6540 svcObj[language].easyReader = service.easyReader;
6541 }
6542
6543 if (service.hasOwnProperty('3D')) {
6544 svcObj[language]['3D'] = service['3D'];
6545 }
6546 });
6547 return svcObj;
6548 }, {});
6549 };
6550
6551 var formatVideoPlaylist = function formatVideoPlaylist(_ref3) {
6552 var _attributes2;
6553
6554 var attributes = _ref3.attributes,
6555 segments = _ref3.segments,
6556 sidx = _ref3.sidx,
6557 discontinuityStarts = _ref3.discontinuityStarts;
6558 var playlist = {
6559 attributes: (_attributes2 = {
6560 NAME: attributes.id,
6561 AUDIO: 'audio',
6562 SUBTITLES: 'subs',
6563 RESOLUTION: {
6564 width: attributes.width,
6565 height: attributes.height
6566 },
6567 CODECS: attributes.codecs,
6568 BANDWIDTH: attributes.bandwidth
6569 }, _attributes2['PROGRAM-ID'] = 1, _attributes2),
6570 uri: '',
6571 endList: attributes.type === 'static',
6572 timeline: attributes.periodStart,
6573 resolvedUri: '',
6574 targetDuration: attributes.duration,
6575 discontinuityStarts: discontinuityStarts,
6576 timelineStarts: attributes.timelineStarts,
6577 segments: segments
6578 };
6579
6580 if (attributes.frameRate) {
6581 playlist.attributes['FRAME-RATE'] = attributes.frameRate;
6582 }
6583
6584 if (attributes.contentProtection) {
6585 playlist.contentProtection = attributes.contentProtection;
6586 }
6587
6588 if (sidx) {
6589 playlist.sidx = sidx;
6590 }
6591
6592 return playlist;
6593 };
6594
6595 var videoOnly = function videoOnly(_ref4) {
6596 var attributes = _ref4.attributes;
6597 return attributes.mimeType === 'video/mp4' || attributes.mimeType === 'video/webm' || attributes.contentType === 'video';
6598 };
6599
6600 var audioOnly = function audioOnly(_ref5) {
6601 var attributes = _ref5.attributes;
6602 return attributes.mimeType === 'audio/mp4' || attributes.mimeType === 'audio/webm' || attributes.contentType === 'audio';
6603 };
6604
6605 var vttOnly = function vttOnly(_ref6) {
6606 var attributes = _ref6.attributes;
6607 return attributes.mimeType === 'text/vtt' || attributes.contentType === 'text';
6608 };
6609 /**
6610 * Contains start and timeline properties denoting a timeline start. For DASH, these will
6611 * be the same number.
6612 *
6613 * @typedef {Object} TimelineStart
6614 * @property {number} start - the start time of the timeline
6615 * @property {number} timeline - the timeline number
6616 */
6617
6618 /**
6619 * Adds appropriate media and discontinuity sequence values to the segments and playlists.
6620 *
6621 * Throughout mpd-parser, the `number` attribute is used in relation to `startNumber`, a
6622 * DASH-specific attribute used in constructing segment URIs from templates. However, from
6623 * an HLS perspective, the `number` attribute on a segment would be its `mediaSequence`
6624 * value, which should start at the original media sequence value (or 0) and increment by 1
6625 * for each segment thereafter. Since DASH's `startNumber` values are independent per
6626 * period, it doesn't make sense to use it for `number`. Instead, assume everything starts
6627 * from a 0 mediaSequence value and increment from there.
6628 *
6629 * Note that VHS currently doesn't use the `number` property, but it can be helpful for
6630 * debugging and making sense of the manifest.
6631 *
6632 * For live playlists, to account for values increasing in manifests when periods are
6633 * removed on refreshes, merging logic should be used to update the numbers to their
6634 * appropriate values (to ensure they're sequential and increasing).
6635 *
6636 * @param {Object[]} playlists - the playlists to update
6637 * @param {TimelineStart[]} timelineStarts - the timeline starts for the manifest
6638 */
6639
6640
6641 var addMediaSequenceValues = function addMediaSequenceValues(playlists, timelineStarts) {
6642 // increment all segments sequentially
6643 playlists.forEach(function (playlist) {
6644 playlist.mediaSequence = 0;
6645 playlist.discontinuitySequence = findIndex(timelineStarts, function (_ref7) {
6646 var timeline = _ref7.timeline;
6647 return timeline === playlist.timeline;
6648 });
6649
6650 if (!playlist.segments) {
6651 return;
6652 }
6653
6654 playlist.segments.forEach(function (segment, index) {
6655 segment.number = index;
6656 });
6657 });
6658 };
6659 /**
6660 * Given a media group object, flattens all playlists within the media group into a single
6661 * array.
6662 *
6663 * @param {Object} mediaGroupObject - the media group object
6664 *
6665 * @return {Object[]}
6666 * The media group playlists
6667 */
6668
6669
6670 var flattenMediaGroupPlaylists = function flattenMediaGroupPlaylists(mediaGroupObject) {
6671 if (!mediaGroupObject) {
6672 return [];
6673 }
6674
6675 return Object.keys(mediaGroupObject).reduce(function (acc, label) {
6676 var labelContents = mediaGroupObject[label];
6677 return acc.concat(labelContents.playlists);
6678 }, []);
6679 };
6680
6681 var toM3u8 = function toM3u8(_ref8) {
6682 var _mediaGroups;
6683
6684 var dashPlaylists = _ref8.dashPlaylists,
6685 locations = _ref8.locations,
6686 _ref8$sidxMapping = _ref8.sidxMapping,
6687 sidxMapping = _ref8$sidxMapping === void 0 ? {} : _ref8$sidxMapping,
6688 previousManifest = _ref8.previousManifest;
6689
6690 if (!dashPlaylists.length) {
6691 return {};
6692 } // grab all main manifest attributes
6693
6694
6695 var _dashPlaylists$0$attr = dashPlaylists[0].attributes,
6696 duration = _dashPlaylists$0$attr.sourceDuration,
6697 type = _dashPlaylists$0$attr.type,
6698 suggestedPresentationDelay = _dashPlaylists$0$attr.suggestedPresentationDelay,
6699 minimumUpdatePeriod = _dashPlaylists$0$attr.minimumUpdatePeriod;
6700 var videoPlaylists = mergeDiscontiguousPlaylists(dashPlaylists.filter(videoOnly)).map(formatVideoPlaylist);
6701 var audioPlaylists = mergeDiscontiguousPlaylists(dashPlaylists.filter(audioOnly));
6702 var vttPlaylists = mergeDiscontiguousPlaylists(dashPlaylists.filter(vttOnly));
6703 var captions = dashPlaylists.map(function (playlist) {
6704 return playlist.attributes.captionServices;
6705 }).filter(Boolean);
6706 var manifest = {
6707 allowCache: true,
6708 discontinuityStarts: [],
6709 segments: [],
6710 endList: true,
6711 mediaGroups: (_mediaGroups = {
6712 AUDIO: {},
6713 VIDEO: {}
6714 }, _mediaGroups['CLOSED-CAPTIONS'] = {}, _mediaGroups.SUBTITLES = {}, _mediaGroups),
6715 uri: '',
6716 duration: duration,
6717 playlists: addSidxSegmentsToPlaylists(videoPlaylists, sidxMapping)
6718 };
6719
6720 if (minimumUpdatePeriod >= 0) {
6721 manifest.minimumUpdatePeriod = minimumUpdatePeriod * 1000;
6722 }
6723
6724 if (locations) {
6725 manifest.locations = locations;
6726 }
6727
6728 if (type === 'dynamic') {
6729 manifest.suggestedPresentationDelay = suggestedPresentationDelay;
6730 }
6731
6732 var isAudioOnly = manifest.playlists.length === 0;
6733 var organizedAudioGroup = audioPlaylists.length ? organizeAudioPlaylists(audioPlaylists, sidxMapping, isAudioOnly) : null;
6734 var organizedVttGroup = vttPlaylists.length ? organizeVttPlaylists(vttPlaylists, sidxMapping) : null;
6735 var formattedPlaylists = videoPlaylists.concat(flattenMediaGroupPlaylists(organizedAudioGroup), flattenMediaGroupPlaylists(organizedVttGroup));
6736 var playlistTimelineStarts = formattedPlaylists.map(function (_ref9) {
6737 var timelineStarts = _ref9.timelineStarts;
6738 return timelineStarts;
6739 });
6740 manifest.timelineStarts = getUniqueTimelineStarts(playlistTimelineStarts);
6741 addMediaSequenceValues(formattedPlaylists, manifest.timelineStarts);
6742
6743 if (organizedAudioGroup) {
6744 manifest.mediaGroups.AUDIO.audio = organizedAudioGroup;
6745 }
6746
6747 if (organizedVttGroup) {
6748 manifest.mediaGroups.SUBTITLES.subs = organizedVttGroup;
6749 }
6750
6751 if (captions.length) {
6752 manifest.mediaGroups['CLOSED-CAPTIONS'].cc = organizeCaptionServices(captions);
6753 }
6754
6755 if (previousManifest) {
6756 return positionManifestOnTimeline({
6757 oldManifest: previousManifest,
6758 newManifest: manifest
6759 });
6760 }
6761
6762 return manifest;
6763 };
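  // Minimal sketch of how mpd-parser feeds this function (locals here are
  // hypothetical): flattened DASH playlists become an m3u8-parser style
  // manifest, optionally positioned against the previous live refresh.
  //
  //   var manifest = toM3u8({
  //     dashPlaylists: dashPlaylists,
  //     locations: locations,
  //     sidxMapping: sidxMapping,
  //     previousManifest: previousManifest
  //   });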
6764 /**
6765 * Calculates the R (repetition) value for a live stream (for the final segment
6766 * in a manifest where the r value is negative 1)
6767 *
6768 * @param {Object} attributes
6769 * Object containing all inherited attributes from parent elements with attribute
6770 * names as keys
6771 * @param {number} time
6772 * current time (typically the total time up until the final segment)
6773 * @param {number} duration
6774 * duration property for the given <S />
6775 *
6776 * @return {number}
6777 * R value to reach the end of the given period
6778 */
6779
6780
6781 var getLiveRValue = function getLiveRValue(attributes, time, duration) {
6782 var NOW = attributes.NOW,
6783 clientOffset = attributes.clientOffset,
6784 availabilityStartTime = attributes.availabilityStartTime,
6785 _attributes$timescale = attributes.timescale,
6786 timescale = _attributes$timescale === void 0 ? 1 : _attributes$timescale,
6787 _attributes$periodSta = attributes.periodStart,
6788 periodStart = _attributes$periodSta === void 0 ? 0 : _attributes$periodSta,
6789 _attributes$minimumUp = attributes.minimumUpdatePeriod,
6790 minimumUpdatePeriod = _attributes$minimumUp === void 0 ? 0 : _attributes$minimumUp;
6791 var now = (NOW + clientOffset) / 1000;
6792 var periodStartWC = availabilityStartTime + periodStart;
6793 var periodEndWC = now + minimumUpdatePeriod;
6794 var periodDuration = periodEndWC - periodStartWC;
6795 return Math.ceil((periodDuration * timescale - time) / duration);
6796 };
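  // Worked sketch with hypothetical numbers: now = 1000 s, periodStartWC = 0
  // and minimumUpdatePeriod = 0 give periodDuration = 1000 s; at a timescale
  // of 90000 with duration = 450000 (5 second segments) and 500 s already
  // described (time = 45000000), the repeat count is
  // ceil((1000 * 90000 - 45000000) / 450000) = 100 remaining segments.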
6797 /**
6798 * Uses information provided by SegmentTemplate.SegmentTimeline to determine segment
6799 * timing and duration
6800 *
6801 * @param {Object} attributes
6802 * Object containing all inherited attributes from parent elements with attribute
6803 * names as keys
6804 * @param {Object[]} segmentTimeline
6805 * List of objects representing the attributes of each S element contained within
6806 *
6807 * @return {{number: number, duration: number, time: number, timeline: number}[]}
6808 * List of Objects with segment timing and duration info
6809 */
6810
6811
6812 var parseByTimeline = function parseByTimeline(attributes, segmentTimeline) {
6813 var type = attributes.type,
6814 _attributes$minimumUp2 = attributes.minimumUpdatePeriod,
6815 minimumUpdatePeriod = _attributes$minimumUp2 === void 0 ? 0 : _attributes$minimumUp2,
6816 _attributes$media = attributes.media,
6817 media = _attributes$media === void 0 ? '' : _attributes$media,
6818 sourceDuration = attributes.sourceDuration,
6819 _attributes$timescale2 = attributes.timescale,
6820 timescale = _attributes$timescale2 === void 0 ? 1 : _attributes$timescale2,
6821 _attributes$startNumb = attributes.startNumber,
6822 startNumber = _attributes$startNumb === void 0 ? 1 : _attributes$startNumb,
6823 timeline = attributes.periodStart;
6824 var segments = [];
6825 var time = -1;
6826
6827 for (var sIndex = 0; sIndex < segmentTimeline.length; sIndex++) {
6828 var S = segmentTimeline[sIndex];
6829 var duration = S.d;
6830 var repeat = S.r || 0;
6831 var segmentTime = S.t || 0;
6832
6833 if (time < 0) {
6834 // first segment
6835 time = segmentTime;
6836 }
6837
6838 if (segmentTime && segmentTime > time) {
6839 // discontinuity
6840 // TODO: How should this type of discontinuity be handled?
6841 // timeline++ here would treat it like an HLS discontinuity and content would
6842 // get appended without gap
6843 // E.G.
6844 // <S t="0" d="1" />
6845 // <S d="1" />
6846 // <S d="1" />
6847 // <S t="5" d="1" />
6848 // would have $Time$ values of [0, 1, 2, 5]
6849 // should this be appended at time positions [0, 1, 2, 3] (#EXT-X-DISCONTINUITY),
6850 // or [0, 1, 2, gap, gap, 5]? (#EXT-X-GAP)
6851 // does the value of sourceDuration consider this when calculating arbitrary
6852 // negative @r repeat value?
6853 // E.G. Same elements as above with this added at the end
6854 // <S d="1" r="-1" />
6855 // with a sourceDuration of 10
6856 // Would the 2 gaps be included in the time duration calculations resulting in
6857 // 8 segments with $Time$ values of [0, 1, 2, 5, 6, 7, 8, 9] or 10 segments
6858 // with $Time$ values of [0, 1, 2, 5, 6, 7, 8, 9, 10, 11] ?
6859 time = segmentTime;
6860 }
6861
6862 var count = void 0;
6863
6864 if (repeat < 0) {
6865 var nextS = sIndex + 1;
6866
6867 if (nextS === segmentTimeline.length) {
6868 // last segment
6869 if (type === 'dynamic' && minimumUpdatePeriod > 0 && media.indexOf('$Number$') > 0) {
6870 count = getLiveRValue(attributes, time, duration);
6871 } else {
6872 // TODO: This may be incorrect depending on conclusion of TODO above
6873 count = (sourceDuration * timescale - time) / duration;
6874 }
6875 } else {
6876 count = (segmentTimeline[nextS].t - time) / duration;
6877 }
6878 } else {
6879 count = repeat + 1;
6880 }
6881
6882 var end = startNumber + segments.length + count;
6883 var number = startNumber + segments.length;
6884
6885 while (number < end) {
6886 segments.push({
6887 number: number,
6888 duration: duration / timescale,
6889 time: time,
6890 timeline: timeline
6891 });
6892 time += duration;
6893 number++;
6894 }
6895 }
6896
6897 return segments;
6898 };
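  // Worked sketch: a static 6 second source described by one explicit S plus
  // a negative repeat fills out to the period end.
  //
  //   parseByTimeline(
  //     { type: 'static', timescale: 1, sourceDuration: 6, periodStart: 0 },
  //     [{ t: 0, d: 2 }, { d: 2, r: -1 }]
  //   );
  //   // => [{ number: 1, duration: 2, time: 0, timeline: 0 },
  //   //     { number: 2, duration: 2, time: 2, timeline: 0 },
  //   //     { number: 3, duration: 2, time: 4, timeline: 0 }]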
6899
6900 var identifierPattern = /\$([A-Za-z]*)(?:(%0)([0-9]+)d)?\$/g; // [A-Za-z] rather than [A-z], which would also match the punctuation between 'Z' and 'a'
6901 /**
6902 * Replaces template identifiers with corresponding values. To be used as the callback
6903 * for String.prototype.replace
6904 *
6905 * @name replaceCallback
6906 * @function
6907 * @param {string} match
6908 * Entire match of identifier
6909 * @param {string} identifier
6910 * Name of matched identifier
6911 * @param {string} format
6912 * Format tag string. Its presence indicates that padding is expected
6913 * @param {string} width
6914 * Desired length of the replaced value. Values less than this width shall be left
6915 * zero padded
6916 * @return {string}
6917 * Replacement for the matched identifier
6918 */
6919
6920 /**
6921 * Returns a function to be used as a callback for String.prototype.replace to replace
6922 * template identifiers
6923 *
6924 * @param {Object} values
6925 * Object containing values that shall be used to replace known identifiers
6926 * @param {number} values.RepresentationID
6927 * Value of the Representation@id attribute
6928 * @param {number} values.Number
6929 * Number of the corresponding segment
6930 * @param {number} values.Bandwidth
6931 * Value of the Representation@bandwidth attribute.
6932 * @param {number} values.Time
6933 * Timestamp value of the corresponding segment
6934 * @return {replaceCallback}
6935 * Callback to be used with String.prototype.replace to replace identifiers
6936 */
6937
6938 var identifierReplacement = function identifierReplacement(values) {
6939 return function (match, identifier, format, width) {
6940 if (match === '$$') {
6941 // escape sequence
6942 return '$';
6943 }
6944
6945 if (typeof values[identifier] === 'undefined') {
6946 return match;
6947 }
6948
6949 var value = '' + values[identifier];
6950
6951 if (identifier === 'RepresentationID') {
6952 // Format tag shall not be present with RepresentationID
6953 return value;
6954 }
6955
6956 if (!format) {
6957 width = 1;
6958 } else {
6959 width = parseInt(width, 10);
6960 }
6961
6962 if (value.length >= width) {
6963 return value;
6964 }
6965
6966 return "" + new Array(width - value.length + 1).join('0') + value;
6967 };
6968 };
6969 /**
6970 * Constructs a segment url from a template string
6971 *
6972 * @param {string} url
6973 * Template string to construct url from
6974 * @param {Object} values
6975 * Object containing values that shall be used to replace known identifiers
6976 * @param {number} values.RepresentationID
6977 * Value of the Representation@id attribute
6978 * @param {number} values.Number
6979 * Number of the corresponding segment
6980 * @param {number} values.Bandwidth
6981 * Value of the Representation@bandwidth attribute.
6982 * @param {number} values.Time
6983 * Timestamp value of the corresponding segment
6984 * @return {string}
6985 * Segment url with identifiers replaced
6986 */
6987
6988
6989 var constructTemplateUrl = function constructTemplateUrl(url, values) {
6990 return url.replace(identifierPattern, identifierReplacement(values));
6991 };
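/*
 * Editor's sketch (hypothetical template and values): given the identifier and
 * padding rules documented above, one would expect
 *   constructTemplateUrl('$RepresentationID$/seg-$Number%05d$.m4s',
 *     { RepresentationID: 'video-1', Number: 42 });
 *   // => 'video-1/seg-00042.m4s'
 * and any '$$' in a template to collapse to a literal '$'.
 */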
6992 /**
6993 * Generates a list of objects containing timing and duration information about each
6994 * segment needed to generate segment uris and the complete segment object
6995 *
6996 * @param {Object} attributes
6997 * Object containing all inherited attributes from parent elements with attribute
6998 * names as keys
6999 * @param {Object[]|undefined} segmentTimeline
7000 * List of objects representing the attributes of each S element contained within
7001 * the SegmentTimeline element
7002 * @return {{number: number, duration: number, time: number, timeline: number}[]}
7003 * List of Objects with segment timing and duration info
7004 */
7005
7006
7007 var parseTemplateInfo = function parseTemplateInfo(attributes, segmentTimeline) {
7008 if (!attributes.duration && !segmentTimeline) {
7009 // if neither @duration nor SegmentTimeline is present, then there shall be exactly
7010 // one media segment
7011 return [{
7012 number: attributes.startNumber || 1,
7013 duration: attributes.sourceDuration,
7014 time: 0,
7015 timeline: attributes.periodStart
7016 }];
7017 }
7018
7019 if (attributes.duration) {
7020 return parseByDuration(attributes);
7021 }
7022
7023 return parseByTimeline(attributes, segmentTimeline);
7024 };
7025 /**
7026 * Generates a list of segments using information provided by the SegmentTemplate element
7027 *
7028 * @param {Object} attributes
7029 * Object containing all inherited attributes from parent elements with attribute
7030 * names as keys
7031 * @param {Object[]|undefined} segmentTimeline
7032 * List of objects representing the attributes of each S element contained within
7033 * the SegmentTimeline element
7034 * @return {Object[]}
7035 * List of segment objects
7036 */
7037
7038
7039 var segmentsFromTemplate = function segmentsFromTemplate(attributes, segmentTimeline) {
7040 var templateValues = {
7041 RepresentationID: attributes.id,
7042 Bandwidth: attributes.bandwidth || 0
7043 };
7044 var _attributes$initializ = attributes.initialization,
7045 initialization = _attributes$initializ === void 0 ? {
7046 sourceURL: '',
7047 range: ''
7048 } : _attributes$initializ;
7049 var mapSegment = urlTypeToSegment({
7050 baseUrl: attributes.baseUrl,
7051 source: constructTemplateUrl(initialization.sourceURL, templateValues),
7052 range: initialization.range
7053 });
7054 var segments = parseTemplateInfo(attributes, segmentTimeline);
7055 return segments.map(function (segment) {
7056 templateValues.Number = segment.number;
7057 templateValues.Time = segment.time;
7058 var uri = constructTemplateUrl(attributes.media || '', templateValues); // See DASH spec section 5.3.9.2.2
7059 // - if timescale isn't present on any level, default to 1.
7060
7061 var timescale = attributes.timescale || 1; // - if presentationTimeOffset isn't present on any level, default to 0
7062
7063 var presentationTimeOffset = attributes.presentationTimeOffset || 0;
7064 var presentationTime = // Even if the @t attribute is not specified for the segment, segment.time is
7065 // calculated in mpd-parser prior to this, so it's assumed to be available.
7066 attributes.periodStart + (segment.time - presentationTimeOffset) / timescale;
7067 var map = {
7068 uri: uri,
7069 timeline: segment.timeline,
7070 duration: segment.duration,
7071 resolvedUri: resolveUrl$1(attributes.baseUrl || '', uri),
7072 map: mapSegment,
7073 number: segment.number,
7074 presentationTime: presentationTime
7075 };
7076 return map;
7077 });
7078 };
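/*
 * Editor's sketch of the resulting segment shape (illustrative attribute
 * values, not from the library): with periodStart = 0, timescale = 90000,
 * presentationTimeOffset = 0 and a segment at time 360000, each mapped entry
 * looks like:
 *   { uri, resolvedUri, map, number, timeline, duration,
 *     presentationTime: 0 + (360000 - 0) / 90000 } // => 4 seconds
 */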
7079 /**
7080 * Converts a <SegmentUrl> (of type URLType from the DASH spec 5.3.9.2 Table 14)
7081 * to an object that matches the output of a segment in videojs/mpd-parser
7082 *
7083 * @param {Object} attributes
7084 * Object containing all inherited attributes from parent elements with attribute
7085 * names as keys
7086 * @param {Object} segmentUrl
7087 * <SegmentURL> node to translate into a segment object
7088 * @return {Object} translated segment object
7089 */
7090
7091
7092 var SegmentURLToSegmentObject = function SegmentURLToSegmentObject(attributes, segmentUrl) {
7093 var baseUrl = attributes.baseUrl,
7094 _attributes$initializ = attributes.initialization,
7095 initialization = _attributes$initializ === void 0 ? {} : _attributes$initializ;
7096 var initSegment = urlTypeToSegment({
7097 baseUrl: baseUrl,
7098 source: initialization.sourceURL,
7099 range: initialization.range
7100 });
7101 var segment = urlTypeToSegment({
7102 baseUrl: baseUrl,
7103 source: segmentUrl.media,
7104 range: segmentUrl.mediaRange
7105 });
7106 segment.map = initSegment;
7107 return segment;
7108 };
7109 /**
7110 * Generates a list of segments using information provided by the SegmentList element
7111 * SegmentList (DASH spec section 5.3.9.3.2) contains a set of <SegmentURL> nodes. Each
7112 * node should be translated into a segment.
7113 *
7114 * @param {Object} attributes
7115 * Object containing all inherited attributes from parent elements with attribute
7116 * names as keys
7117 * @param {Object[]|undefined} segmentTimeline
7118 * List of objects representing the attributes of each S element contained within
7119 * the SegmentTimeline element
7120 * @return {Object[]} list of segments
7121 */
7122
7123
7124 var segmentsFromList = function segmentsFromList(attributes, segmentTimeline) {
7125 var duration = attributes.duration,
7126 _attributes$segmentUr = attributes.segmentUrls,
7127 segmentUrls = _attributes$segmentUr === void 0 ? [] : _attributes$segmentUr,
7128 periodStart = attributes.periodStart; // Per spec (5.3.9.2.1), if neither SegmentTimeline nor @duration is present there is
7129 // no way to determine segment duration; if both are defined, it is outside of spec.
7130
7131 if (!duration && !segmentTimeline || duration && segmentTimeline) {
7132 throw new Error(errors.SEGMENT_TIME_UNSPECIFIED);
7133 }
7134
7135 var segmentUrlMap = segmentUrls.map(function (segmentUrlObject) {
7136 return SegmentURLToSegmentObject(attributes, segmentUrlObject);
7137 });
7138 var segmentTimeInfo;
7139
7140 if (duration) {
7141 segmentTimeInfo = parseByDuration(attributes);
7142 }
7143
7144 if (segmentTimeline) {
7145 segmentTimeInfo = parseByTimeline(attributes, segmentTimeline);
7146 }
7147
7148 var segments = segmentTimeInfo.map(function (segmentTime, index) {
7149 if (segmentUrlMap[index]) {
7150 var segment = segmentUrlMap[index]; // See DASH spec section 5.3.9.2.2
7151 // - if timescale isn't present on any level, default to 1.
7152
7153 var timescale = attributes.timescale || 1; // - if presentationTimeOffset isn't present on any level, default to 0
7154
7155 var presentationTimeOffset = attributes.presentationTimeOffset || 0;
7156 segment.timeline = segmentTime.timeline;
7157 segment.duration = segmentTime.duration;
7158 segment.number = segmentTime.number;
7159 segment.presentationTime = periodStart + (segmentTime.time - presentationTimeOffset) / timescale;
7160 return segment;
7161 } // Since we're mapping we should get rid of any blank segments (in case
7162 // the given SegmentTimeline describes more elements than we have
7163 // SegmentURLs for).
7164
7165 }).filter(function (segment) {
7166 return segment;
7167 });
7168 return segments;
7169 };
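/*
 * Editor's note: per the check above, exactly one of @duration or a
 * SegmentTimeline must be present. A hypothetical minimal input such as
 *   segmentsFromList({ duration: 4, timescale: 1, periodStart: 0, sourceDuration: 8,
 *     segmentUrls: [{ media: 's1.mp4' }, { media: 's2.mp4' }], baseUrl: 'https://example.com/' })
 * would pair each <SegmentURL> with timing information from parseByDuration.
 */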
7170
7171 var generateSegments = function generateSegments(_ref) {
7172 var attributes = _ref.attributes,
7173 segmentInfo = _ref.segmentInfo;
7174 var segmentAttributes;
7175 var segmentsFn;
7176
7177 if (segmentInfo.template) {
7178 segmentsFn = segmentsFromTemplate;
7179 segmentAttributes = merge(attributes, segmentInfo.template);
7180 } else if (segmentInfo.base) {
7181 segmentsFn = segmentsFromBase;
7182 segmentAttributes = merge(attributes, segmentInfo.base);
7183 } else if (segmentInfo.list) {
7184 segmentsFn = segmentsFromList;
7185 segmentAttributes = merge(attributes, segmentInfo.list);
7186 }
7187
7188 var segmentsInfo = {
7189 attributes: attributes
7190 };
7191
7192 if (!segmentsFn) {
7193 return segmentsInfo;
7194 }
7195
7196 var segments = segmentsFn(segmentAttributes, segmentInfo.segmentTimeline); // The @duration attribute will be used to determine the playlist's targetDuration which
7197 // must be in seconds. Since we've generated the segment list, we no longer need
7198 // @duration to be in @timescale units, so we can convert it here.
7199
7200 if (segmentAttributes.duration) {
7201 var _segmentAttributes = segmentAttributes,
7202 duration = _segmentAttributes.duration,
7203 _segmentAttributes$ti = _segmentAttributes.timescale,
7204 timescale = _segmentAttributes$ti === void 0 ? 1 : _segmentAttributes$ti;
7205 segmentAttributes.duration = duration / timescale;
7206 } else if (segments.length) {
7207 // if there is no @duration attribute, use the largest segment duration
7208 // as the target duration
7209 segmentAttributes.duration = segments.reduce(function (max, segment) {
7210 return Math.max(max, Math.ceil(segment.duration));
7211 }, 0);
7212 } else {
7213 segmentAttributes.duration = 0;
7214 }
7215
7216 segmentsInfo.attributes = segmentAttributes;
7217 segmentsInfo.segments = segments; // This is a sidx box without actual segment information
7218
7219 if (segmentInfo.base && segmentAttributes.indexRange) {
7220 segmentsInfo.sidx = segments[0];
7221 segmentsInfo.segments = [];
7222 }
7223
7224 return segmentsInfo;
7225 };
7226
7227 var toPlaylists = function toPlaylists(representations) {
7228 return representations.map(generateSegments);
7229 };
7230
7231 var findChildren = function findChildren(element, name) {
7232 return from(element.childNodes).filter(function (_ref) {
7233 var tagName = _ref.tagName;
7234 return tagName === name;
7235 });
7236 };
7237
7238 var getContent = function getContent(element) {
7239 return element.textContent.trim();
7240 };
7241 /**
7242 * Converts the provided string that may contain a division operation to a number.
7243 *
7244 * @param {string} value - the provided string value
7245 *
7246 * @return {number} the parsed string value
7247 */
7248
7249
7250 var parseDivisionValue = function parseDivisionValue(value) {
7251 return parseFloat(value.split('/').reduce(function (prev, current) {
7252 return prev / current;
7253 }));
7254 };
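/*
 * Editor's example: frame rates are often written as a division, e.g.
 *   parseDivisionValue('30000/1001'); // => 29.97002997002997
 *   parseDivisionValue('25');         // => 25
 */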
7255
7256 var parseDuration = function parseDuration(str) {
7257 var SECONDS_IN_YEAR = 365 * 24 * 60 * 60;
7258 var SECONDS_IN_MONTH = 30 * 24 * 60 * 60;
7259 var SECONDS_IN_DAY = 24 * 60 * 60;
7260 var SECONDS_IN_HOUR = 60 * 60;
7261 var SECONDS_IN_MIN = 60; // P10Y10M10DT10H10M10.1S
7262
7263 var durationRegex = /P(?:(\d*)Y)?(?:(\d*)M)?(?:(\d*)D)?(?:T(?:(\d*)H)?(?:(\d*)M)?(?:([\d.]*)S)?)?/;
7264 var match = durationRegex.exec(str);
7265
7266 if (!match) {
7267 return 0;
7268 }
7269
7270 var _match$slice = match.slice(1),
7271 year = _match$slice[0],
7272 month = _match$slice[1],
7273 day = _match$slice[2],
7274 hour = _match$slice[3],
7275 minute = _match$slice[4],
7276 second = _match$slice[5];
7277
7278 return parseFloat(year || 0) * SECONDS_IN_YEAR + parseFloat(month || 0) * SECONDS_IN_MONTH + parseFloat(day || 0) * SECONDS_IN_DAY + parseFloat(hour || 0) * SECONDS_IN_HOUR + parseFloat(minute || 0) * SECONDS_IN_MIN + parseFloat(second || 0);
7279 };
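/*
 * Editor's example of the ISO 8601 duration parsing above:
 *   parseDuration('PT1H30M15.5S'); // => 3600 + 1800 + 15.5 = 5415.5 seconds
 * Note the approximation: months count as 30 days and years as 365 days.
 */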
7280
7281 var parseDate = function parseDate(str) {
7282 // Date format without timezone according to ISO 8601
7283 // YYYY-MM-DDThh:mm:ss.ssssss
7284 var dateRegex = /^\d+-\d+-\d+T\d+:\d+:\d+(\.\d+)?$/; // If the date string does not specify a timezone, we must specify UTC. This is
7285 // expressed by ending with 'Z'
7286
7287 if (dateRegex.test(str)) {
7288 str += 'Z';
7289 }
7290
7291 return Date.parse(str);
7292 };
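/*
 * Editor's example: a date string without a timezone is treated as UTC by
 * appending 'Z' before parsing:
 *   parseDate('2023-01-01T00:00:00'); // parsed as ...T00:00:00Z => 1672531200000 ms
 */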
7293
7294 var parsers = {
7295 /**
7296 * Specifies the duration of the entire Media Presentation. Format is a duration string
7297 * as specified in ISO 8601
7298 *
7299 * @param {string} value
7300 * value of attribute as a string
7301 * @return {number}
7302 * The duration in seconds
7303 */
7304 mediaPresentationDuration: function mediaPresentationDuration(value) {
7305 return parseDuration(value);
7306 },
7307
7308 /**
7309 * Specifies the Segment availability start time for all Segments referred to in this
7310 * MPD. For a dynamic manifest, it specifies the anchor for the earliest availability
7311 * time. Format is a date string as specified in ISO 8601
7312 *
7313 * @param {string} value
7314 * value of attribute as a string
7315 * @return {number}
7316 * The date as seconds from unix epoch
7317 */
7318 availabilityStartTime: function availabilityStartTime(value) {
7319 return parseDate(value) / 1000;
7320 },
7321
7322 /**
7323 * Specifies the smallest period between potential changes to the MPD. Format is a
7324 * duration string as specified in ISO 8601
7325 *
7326 * @param {string} value
7327 * value of attribute as a string
7328 * @return {number}
7329 * The duration in seconds
7330 */
7331 minimumUpdatePeriod: function minimumUpdatePeriod(value) {
7332 return parseDuration(value);
7333 },
7334
7335 /**
7336 * Specifies the suggested presentation delay. Format is a
7337 * duration string as specified in ISO 8601
7338 *
7339 * @param {string} value
7340 * value of attribute as a string
7341 * @return {number}
7342 * The duration in seconds
7343 */
7344 suggestedPresentationDelay: function suggestedPresentationDelay(value) {
7345 return parseDuration(value);
7346 },
7347
7348 /**
7349 * Specifies the type of mpd. Can be either "static" or "dynamic"
7350 *
7351 * @param {string} value
7352 * value of attribute as a string
7353 *
7354 * @return {string}
7355 * The type as a string
7356 */
7357 type: function type(value) {
7358 return value;
7359 },
7360
7361 /**
7362 * Specifies the duration of the smallest time shifting buffer for any Representation
7363 * in the MPD. Format is a duration string as specified in ISO 8601
7364 *
7365 * @param {string} value
7366 * value of attribute as a string
7367 * @return {number}
7368 * The duration in seconds
7369 */
7370 timeShiftBufferDepth: function timeShiftBufferDepth(value) {
7371 return parseDuration(value);
7372 },
7373
7374 /**
7375 * Specifies the PeriodStart time of the Period relative to the availabilityStartTime.
7376 * Format is a duration string as specified in ISO 8601
7377 *
7378 * @param {string} value
7379 * value of attribute as a string
7380 * @return {number}
7381 * The duration in seconds
7382 */
7383 start: function start(value) {
7384 return parseDuration(value);
7385 },
7386
7387 /**
7388 * Specifies the width of the visual presentation
7389 *
7390 * @param {string} value
7391 * value of attribute as a string
7392 * @return {number}
7393 * The parsed width
7394 */
7395 width: function width(value) {
7396 return parseInt(value, 10);
7397 },
7398
7399 /**
7400 * Specifies the height of the visual presentation
7401 *
7402 * @param {string} value
7403 * value of attribute as a string
7404 * @return {number}
7405 * The parsed height
7406 */
7407 height: function height(value) {
7408 return parseInt(value, 10);
7409 },
7410
7411 /**
7412 * Specifies the bitrate of the representation
7413 *
7414 * @param {string} value
7415 * value of attribute as a string
7416 * @return {number}
7417 * The parsed bandwidth
7418 */
7419 bandwidth: function bandwidth(value) {
7420 return parseInt(value, 10);
7421 },
7422
7423 /**
7424 * Specifies the frame rate of the representation
7425 *
7426 * @param {string} value
7427 * value of attribute as a string
7428 * @return {number}
7429 * The parsed frame rate
7430 */
7431 frameRate: function frameRate(value) {
7432 return parseDivisionValue(value);
7433 },
7434
7435 /**
7436 * Specifies the number of the first Media Segment in this Representation in the Period
7437 *
7438 * @param {string} value
7439 * value of attribute as a string
7440 * @return {number}
7441 * The parsed number
7442 */
7443 startNumber: function startNumber(value) {
7444 return parseInt(value, 10);
7445 },
7446
7447 /**
7448 * Specifies the timescale in units per second
7449 *
7450 * @param {string} value
7451 * value of attribute as a string
7452 * @return {number}
7453 * The parsed timescale
7454 */
7455 timescale: function timescale(value) {
7456 return parseInt(value, 10);
7457 },
7458
7459 /**
7460 * Specifies the presentationTimeOffset.
7461 *
7462 * @param {string} value
7463 * value of the attribute as a string
7464 *
7465 * @return {number}
7466 * The parsed presentationTimeOffset
7467 */
7468 presentationTimeOffset: function presentationTimeOffset(value) {
7469 return parseInt(value, 10);
7470 },
7471
7472 /**
7473 * Specifies the constant approximate Segment duration
7474 * NOTE: The <Period> element also contains an @duration attribute. This duration
7475 * specifies the duration of the Period. This attribute is currently not
7476 * supported by the rest of the parser, however we still check for it to prevent
7477 * errors.
7478 *
7479 * @param {string} value
7480 * value of attribute as a string
7481 * @return {number}
7482 * The parsed duration
7483 */
7484 duration: function duration(value) {
7485 var parsedValue = parseInt(value, 10);
7486
7487 if (isNaN(parsedValue)) {
7488 return parseDuration(value);
7489 }
7490
7491 return parsedValue;
7492 },
7493
7494 /**
7495 * Specifies the Segment duration, in units of the value of the @timescale.
7496 *
7497 * @param {string} value
7498 * value of attribute as a string
7499 * @return {number}
7500 * The parsed duration
7501 */
7502 d: function d(value) {
7503 return parseInt(value, 10);
7504 },
7505
7506 /**
7507 * Specifies the MPD start time, in @timescale units, at which the first Segment in the series
7508 * starts relative to the beginning of the Period
7509 *
7510 * @param {string} value
7511 * value of attribute as a string
7512 * @return {number}
7513 * The parsed time
7514 */
7515 t: function t(value) {
7516 return parseInt(value, 10);
7517 },
7518
7519 /**
7520 * Specifies the repeat count of the number of following contiguous Segments with the
7521 * same duration expressed by the value of @d
7522 *
7523 * @param {string} value
7524 * value of attribute as a string
7525 * @return {number}
7526 * The parsed number
7527 */
7528 r: function r(value) {
7529 return parseInt(value, 10);
7530 },
7531
7532 /**
7533 * Default parser for all other attributes. Acts as a no-op and just returns the value
7534 * as a string
7535 *
7536 * @param {string} value
7537 * value of attribute as a string
7538 * @return {string}
7539 * Unparsed value
7540 */
7541 DEFAULT: function DEFAULT(value) {
7542 return value;
7543 }
7544 };
7545 /**
7546 * Gets all the attributes and values of the provided node, parses attributes with known
7547 * types, and returns an object with attribute names mapped to values.
7548 *
7549 * @param {Node} el
7550 * The node to parse attributes from
7551 * @return {Object}
7552 * Object with all attributes of el parsed
7553 */
7554
7555 var parseAttributes = function parseAttributes(el) {
7556 if (!(el && el.attributes)) {
7557 return {};
7558 }
7559
7560 return from(el.attributes).reduce(function (a, e) {
7561 var parseFn = parsers[e.name] || parsers.DEFAULT;
7562 a[e.name] = parseFn(e.value);
7563 return a;
7564 }, {});
7565 };
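/*
 * Editor's sketch: for a hypothetical node <S t="0" d="360000" r="2"/>,
 * parseAttributes would return { t: 0, d: 360000, r: 2 }, using the typed
 * parsers above for t/d/r and parsers.DEFAULT for anything unrecognized.
 */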
7566
7567 var keySystemsMap = {
7568 'urn:uuid:1077efec-c0b2-4d02-ace3-3c1e52e2fb4b': 'org.w3.clearkey',
7569 'urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed': 'com.widevine.alpha',
7570 'urn:uuid:9a04f079-9840-4286-ab92-e65be0885f95': 'com.microsoft.playready',
7571 'urn:uuid:f239e769-efa3-4850-9c16-a903c6932efb': 'com.adobe.primetime'
7572 };
7573 /**
7574 * Builds a list of urls that is the product of the reference urls and BaseURL values
7575 *
7576 * @param {string[]} referenceUrls
7577 * List of reference urls to resolve to
7578 * @param {Node[]} baseUrlElements
7579 * List of BaseURL nodes from the mpd
7580 * @return {string[]}
7581 * List of resolved urls
7582 */
7583
7584 var buildBaseUrls = function buildBaseUrls(referenceUrls, baseUrlElements) {
7585 if (!baseUrlElements.length) {
7586 return referenceUrls;
7587 }
7588
7589 return flatten(referenceUrls.map(function (reference) {
7590 return baseUrlElements.map(function (baseUrlElement) {
7591 return resolveUrl$1(reference, getContent(baseUrlElement));
7592 });
7593 }));
7594 };
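/*
 * Editor's sketch (hypothetical urls): the result is the cross product of the
 * reference urls and the BaseURL node contents, resolved per RFC 3986, e.g.
 * a reference of 'https://cdn.example.com/dash/manifest.mpd' and a single
 * BaseURL of 'media/' yields ['https://cdn.example.com/dash/media/'].
 */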
7595 /**
7596 * Contains all Segment information for its containing AdaptationSet
7597 *
7598 * @typedef {Object} SegmentInformation
7599 * @property {Object|undefined} template
7600 * Contains the attributes for the SegmentTemplate node
7601 * @property {Object[]|undefined} segmentTimeline
7602 * Contains a list of attributes for each S node within the SegmentTimeline node
7603 * @property {Object|undefined} list
7604 * Contains the attributes for the SegmentList node
7605 * @property {Object|undefined} base
7606 * Contains the attributes for the SegmentBase node
7607 */
7608
7609 /**
7610 * Returns all available Segment information contained within the AdaptationSet node
7611 *
7612 * @param {Node} adaptationSet
7613 * The AdaptationSet node to get Segment information from
7614 * @return {SegmentInformation}
7615 * The Segment information contained within the provided AdaptationSet
7616 */
7617
7618
7619 var getSegmentInformation = function getSegmentInformation(adaptationSet) {
7620 var segmentTemplate = findChildren(adaptationSet, 'SegmentTemplate')[0];
7621 var segmentList = findChildren(adaptationSet, 'SegmentList')[0];
7622 var segmentUrls = segmentList && findChildren(segmentList, 'SegmentURL').map(function (s) {
7623 return merge({
7624 tag: 'SegmentURL'
7625 }, parseAttributes(s));
7626 });
7627 var segmentBase = findChildren(adaptationSet, 'SegmentBase')[0];
7628 var segmentTimelineParentNode = segmentList || segmentTemplate;
7629 var segmentTimeline = segmentTimelineParentNode && findChildren(segmentTimelineParentNode, 'SegmentTimeline')[0];
7630 var segmentInitializationParentNode = segmentList || segmentBase || segmentTemplate;
7631 var segmentInitialization = segmentInitializationParentNode && findChildren(segmentInitializationParentNode, 'Initialization')[0]; // SegmentTemplate is handled slightly differently, since it can have both
7632 // @initialization and an <Initialization> node. @initialization can be templated,
7633 // while the node can have a url and range specified. If the <SegmentTemplate> has
7634 // both @initialization and an <Initialization> subelement we opt to override with
7635 // the node, as this interaction is not defined in the spec.
7636
7637 var template = segmentTemplate && parseAttributes(segmentTemplate);
7638
7639 if (template && segmentInitialization) {
7640 template.initialization = segmentInitialization && parseAttributes(segmentInitialization);
7641 } else if (template && template.initialization) {
7642 // If it is @initialization we convert it to an object since this is the format that
7643 // later functions will rely on for the initialization segment. This is only valid
7644 // for <SegmentTemplate>
7645 template.initialization = {
7646 sourceURL: template.initialization
7647 };
7648 }
7649
7650 var segmentInfo = {
7651 template: template,
7652 segmentTimeline: segmentTimeline && findChildren(segmentTimeline, 'S').map(function (s) {
7653 return parseAttributes(s);
7654 }),
7655 list: segmentList && merge(parseAttributes(segmentList), {
7656 segmentUrls: segmentUrls,
7657 initialization: parseAttributes(segmentInitialization)
7658 }),
7659 base: segmentBase && merge(parseAttributes(segmentBase), {
7660 initialization: parseAttributes(segmentInitialization)
7661 })
7662 };
7663 Object.keys(segmentInfo).forEach(function (key) {
7664 if (!segmentInfo[key]) {
7665 delete segmentInfo[key];
7666 }
7667 });
7668 return segmentInfo;
7669 };
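/*
 * Editor's sketch of the returned shape for an AdaptationSet that only uses
 * SegmentTemplate with @initialization (hypothetical values):
 *   { template: { media: '$Number$.m4s', initialization: { sourceURL: 'init.mp4' } },
 *     segmentTimeline: [{ t: 0, d: 360000, r: 2 }] }
 * Keys whose nodes are absent are deleted rather than left undefined.
 */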
7670 /**
7671 * Contains Segment information and attributes needed to construct a Playlist object
7672 * from a Representation
7673 *
7674 * @typedef {Object} RepresentationInformation
7675 * @property {SegmentInformation} segmentInfo
7676 * Segment information for this Representation
7677 * @property {Object} attributes
7678 * Inherited attributes for this Representation
7679 */
7680
7681 /**
7682 * Maps a Representation node to an object containing Segment information and attributes
7683 *
7684 * @name inheritBaseUrlsCallback
7685 * @function
7686 * @param {Node} representation
7687 * Representation node from the mpd
7688 * @return {RepresentationInformation}
7689 * Representation information needed to construct a Playlist object
7690 */
7691
7692 /**
7693 * Returns a callback for Array.prototype.map for mapping Representation nodes to
7694 * Segment information and attributes using inherited BaseURL nodes.
7695 *
7696 * @param {Object} adaptationSetAttributes
7697 * Contains attributes inherited by the AdaptationSet
7698 * @param {string[]} adaptationSetBaseUrls
7699 * Contains list of resolved base urls inherited by the AdaptationSet
7700 * @param {SegmentInformation} adaptationSetSegmentInfo
7701 * Contains Segment information for the AdaptationSet
7702 * @return {inheritBaseUrlsCallback}
7703 * Callback map function
7704 */
7705
7706
7707 var inheritBaseUrls = function inheritBaseUrls(adaptationSetAttributes, adaptationSetBaseUrls, adaptationSetSegmentInfo) {
7708 return function (representation) {
7709 var repBaseUrlElements = findChildren(representation, 'BaseURL');
7710 var repBaseUrls = buildBaseUrls(adaptationSetBaseUrls, repBaseUrlElements);
7711 var attributes = merge(adaptationSetAttributes, parseAttributes(representation));
7712 var representationSegmentInfo = getSegmentInformation(representation);
7713 return repBaseUrls.map(function (baseUrl) {
7714 return {
7715 segmentInfo: merge(adaptationSetSegmentInfo, representationSegmentInfo),
7716 attributes: merge(attributes, {
7717 baseUrl: baseUrl
7718 })
7719 };
7720 });
7721 };
7722 };
7723 /**
7724 * Transforms a series of content protection nodes to
7725 * an object containing pssh data by key system
7726 *
7727 * @param {Node[]} contentProtectionNodes
7728 * Content protection nodes
7729 * @return {Object}
7730 * Object containing pssh data by key system
7731 */
7732
7733
7734 var generateKeySystemInformation = function generateKeySystemInformation(contentProtectionNodes) {
7735 return contentProtectionNodes.reduce(function (acc, node) {
7736 var attributes = parseAttributes(node); // Although it could be argued that according to the UUID RFC spec the UUID string (a-f chars) should be generated
7737 // as a lowercase string it also mentions it should be treated as case-insensitive on input. Since the key system
7738 // UUIDs in the keySystemsMap are hardcoded as lowercase in the codebase there isn't any reason not to do
7739 // .toLowerCase() on the input UUID string from the manifest (at least I could not think of one).
7740
7741 if (attributes.schemeIdUri) {
7742 attributes.schemeIdUri = attributes.schemeIdUri.toLowerCase();
7743 }
7744
7745 var keySystem = keySystemsMap[attributes.schemeIdUri];
7746
7747 if (keySystem) {
7748 acc[keySystem] = {
7749 attributes: attributes
7750 };
7751 var psshNode = findChildren(node, 'cenc:pssh')[0];
7752
7753 if (psshNode) {
7754 var pssh = getContent(psshNode);
7755 acc[keySystem].pssh = pssh && decodeB64ToUint8Array(pssh);
7756 }
7757 }
7758
7759 return acc;
7760 }, {});
7761 }; // defined in ANSI_SCTE 214-1 2016
7762
7763
7764 var parseCaptionServiceMetadata = function parseCaptionServiceMetadata(service) {
7765 // 608 captions
7766 if (service.schemeIdUri === 'urn:scte:dash:cc:cea-608:2015') {
7767 var values = typeof service.value !== 'string' ? [] : service.value.split(';');
7768 return values.map(function (value) {
7769 var channel;
7770 var language; // default language to value
7771
7772 language = value;
7773
7774 if (/^CC\d=/.test(value)) {
7775 var _value$split = value.split('=');
7776
7777 channel = _value$split[0];
7778 language = _value$split[1];
7779 } else if (/^CC\d$/.test(value)) {
7780 channel = value;
7781 }
7782
7783 return {
7784 channel: channel,
7785 language: language
7786 };
7787 });
7788 } else if (service.schemeIdUri === 'urn:scte:dash:cc:cea-708:2015') {
7789 var _values = typeof service.value !== 'string' ? [] : service.value.split(';');
7790
7791 return _values.map(function (value) {
7792 var flags = {
7793 // service or channel number 1-63
7794 'channel': undefined,
7795 // language is a 3ALPHA per ISO 639.2/B
7796 // field is required
7797 'language': undefined,
7798 // BIT 1/0 or ?
7799 // default value is 1, meaning 16:9 aspect ratio, 0 is 4:3, ? is unknown
7800 'aspectRatio': 1,
7801 // BIT 1/0
7802 // easy reader flag indicates the text is tailored to the needs of beginning readers
7803 // default 0, or off
7804 'easyReader': 0,
7805 // BIT 1/0
7806 // If 3d metadata is present (CEA-708.1) then 1
7807 // default 0
7808 '3D': 0
7809 };
7810
7811 if (/=/.test(value)) {
7812 var _value$split2 = value.split('='),
7813 channel = _value$split2[0],
7814 _value$split2$ = _value$split2[1],
7815 opts = _value$split2$ === void 0 ? '' : _value$split2$;
7816
7817 flags.channel = channel;
7818 flags.language = value;
7819 opts.split(',').forEach(function (opt) {
7820 var _opt$split = opt.split(':'),
7821 name = _opt$split[0],
7822 val = _opt$split[1];
7823
7824 if (name === 'lang') {
7825 flags.language = val; // er for easyReader
7826 } else if (name === 'er') {
7827 flags.easyReader = Number(val); // war for wide aspect ratio
7828 } else if (name === 'war') {
7829 flags.aspectRatio = Number(val);
7830 } else if (name === '3D') {
7831 flags['3D'] = Number(val);
7832 }
7833 });
7834 } else {
7835 flags.language = value;
7836 }
7837
7838 if (flags.channel) {
7839 flags.channel = 'SERVICE' + flags.channel;
7840 }
7841
7842 return flags;
7843 });
7844 }
7845 };
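/*
 * Editor's example for the 608 branch (values are illustrative):
 *   parseCaptionServiceMetadata({
 *     schemeIdUri: 'urn:scte:dash:cc:cea-608:2015',
 *     value: 'CC1=eng;CC3=swe'
 *   });
 *   // => [{ channel: 'CC1', language: 'eng' }, { channel: 'CC3', language: 'swe' }]
 */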
7846 /**
7847 * Maps an AdaptationSet node to a list of Representation information objects
7848 *
7849 * @name toRepresentationsCallback
7850 * @function
7851 * @param {Node} adaptationSet
7852 * AdaptationSet node from the mpd
7853 * @return {RepresentationInformation[]}
7854 * List of objects containing Representation information
7855 */
7856
7857 /**
7858 * Returns a callback for Array.prototype.map for mapping AdaptationSet nodes to a list of
7859 * Representation information objects
7860 *
7861 * @param {Object} periodAttributes
7862 * Contains attributes inherited by the Period
7863 * @param {string[]} periodBaseUrls
7864 * Contains list of resolved base urls inherited by the Period
7865 * @param {string[]} periodSegmentInfo
7866 * Contains Segment Information at the period level
7867 * @return {toRepresentationsCallback}
7868 * Callback map function
7869 */
7870
7871
7872 var toRepresentations = function toRepresentations(periodAttributes, periodBaseUrls, periodSegmentInfo) {
7873 return function (adaptationSet) {
7874 var adaptationSetAttributes = parseAttributes(adaptationSet);
7875 var adaptationSetBaseUrls = buildBaseUrls(periodBaseUrls, findChildren(adaptationSet, 'BaseURL'));
7876 var role = findChildren(adaptationSet, 'Role')[0];
7877 var roleAttributes = {
7878 role: parseAttributes(role)
7879 };
7880 var attrs = merge(periodAttributes, adaptationSetAttributes, roleAttributes);
7881 var accessibility = findChildren(adaptationSet, 'Accessibility')[0];
7882 var captionServices = parseCaptionServiceMetadata(parseAttributes(accessibility));
7883
7884 if (captionServices) {
7885 attrs = merge(attrs, {
7886 captionServices: captionServices
7887 });
7888 }
7889
7890 var label = findChildren(adaptationSet, 'Label')[0];
7891
7892 if (label && label.childNodes.length) {
7893 var labelVal = label.childNodes[0].nodeValue.trim();
7894 attrs = merge(attrs, {
7895 label: labelVal
7896 });
7897 }
7898
7899 var contentProtection = generateKeySystemInformation(findChildren(adaptationSet, 'ContentProtection'));
7900
7901 if (Object.keys(contentProtection).length) {
7902 attrs = merge(attrs, {
7903 contentProtection: contentProtection
7904 });
7905 }
7906
7907 var segmentInfo = getSegmentInformation(adaptationSet);
7908 var representations = findChildren(adaptationSet, 'Representation');
7909 var adaptationSetSegmentInfo = merge(periodSegmentInfo, segmentInfo);
7910 return flatten(representations.map(inheritBaseUrls(attrs, adaptationSetBaseUrls, adaptationSetSegmentInfo)));
7911 };
7912 };
7913 /**
7914 * Contains all period information for mapping nodes onto adaptation sets.
7915 *
7916 * @typedef {Object} PeriodInformation
7917 * @property {Node} period.node
7918 * Period node from the mpd
7919 * @property {Object} period.attributes
7920 * Parsed period attributes from node plus any added
7921 */
7922
7923 /**
7924 * Maps a PeriodInformation object to a list of Representation information objects for all
7925 * AdaptationSet nodes contained within the Period.
7926 *
7927 * @name toAdaptationSetsCallback
7928 * @function
7929 * @param {PeriodInformation} period
7930 * Period object containing necessary period information
7931 * @param {number} periodStart
7932 * Start time of the Period within the mpd
7933 * @return {RepresentationInformation[]}
7934 * List of objects containing Representation information
7935 */
7936
7937 /**
7938 * Returns a callback for Array.prototype.map for mapping Period nodes to a list of
7939 * Representation information objects
7940 *
7941 * @param {Object} mpdAttributes
7942 * Contains attributes inherited by the mpd
7943 * @param {string[]} mpdBaseUrls
7944 * Contains list of resolved base urls inherited by the mpd
7945 * @return {toAdaptationSetsCallback}
7946 * Callback map function
7947 */
7948
7949
7950 var toAdaptationSets = function toAdaptationSets(mpdAttributes, mpdBaseUrls) {
7951 return function (period, index) {
7952 var periodBaseUrls = buildBaseUrls(mpdBaseUrls, findChildren(period.node, 'BaseURL'));
7953 var periodAttributes = merge(mpdAttributes, {
7954 periodStart: period.attributes.start
7955 });
7956
7957 if (typeof period.attributes.duration === 'number') {
7958 periodAttributes.periodDuration = period.attributes.duration;
7959 }
7960
7961 var adaptationSets = findChildren(period.node, 'AdaptationSet');
7962 var periodSegmentInfo = getSegmentInformation(period.node);
7963 return flatten(adaptationSets.map(toRepresentations(periodAttributes, periodBaseUrls, periodSegmentInfo)));
7964 };
7965 };
7966 /**
7967 * Gets Period@start property for a given period.
7968 *
7969 * @param {Object} options
7970 * Options object
7971 * @param {Object} options.attributes
7972 * Period attributes
7973 * @param {Object} [options.priorPeriodAttributes]
7974 * Prior period attributes (if prior period is available)
7975 * @param {string} options.mpdType
7976 * The MPD@type these periods came from
7977 * @return {number|null}
7978 * The period start, or null if it's an early available period or error
7979 */
7980
7981
7982 var getPeriodStart = function getPeriodStart(_ref) {
7983 var attributes = _ref.attributes,
7984 priorPeriodAttributes = _ref.priorPeriodAttributes,
7985 mpdType = _ref.mpdType; // Summary of period start time calculation from DASH spec section 5.3.2.1
7986 //
7987 // A period's start is the first period's start + time elapsed after playing all
7988 // prior periods to this one. Periods continue one after the other in time (without
7989 // gaps) until the end of the presentation.
7990 //
7991 // The value of Period@start should be:
7992 // 1. if Period@start is present: value of Period@start
7993 // 2. if previous period exists and it has @duration: previous Period@start +
7994 // previous Period@duration
7995 // 3. if this is first period and MPD@type is 'static': 0
7996 // 4. in all other cases, consider the period an "early available period" (note: not
7997 // currently supported)
7998 // (1)
7999
8000 if (typeof attributes.start === 'number') {
8001 return attributes.start;
8002 } // (2)
8003
8004
8005 if (priorPeriodAttributes && typeof priorPeriodAttributes.start === 'number' && typeof priorPeriodAttributes.duration === 'number') {
8006 return priorPeriodAttributes.start + priorPeriodAttributes.duration;
8007 } // (3)
8008
8009
8010 if (!priorPeriodAttributes && mpdType === 'static') {
8011 return 0;
8012 } // (4)
8013 // There is currently no logic for calculating the Period@start value if there is
8014 // no Period@start or prior Period@start and Period@duration available. This is not made
8015 // explicit by the DASH interop guidelines or the DASH spec, however, since there's
8016 // nothing about any other resolution strategies, it's implied. Thus, this case should
8017 // be considered an early available period, or error, and null should suffice for both
8018 // of those cases.
8019
8020
8021 return null;
8022 };
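/*
 * Editor's examples of the resolution order above (hypothetical attributes):
 *   getPeriodStart({ attributes: { start: 10 }, mpdType: 'static' }); // => 10 (rule 1)
 *   getPeriodStart({ attributes: {},
 *     priorPeriodAttributes: { start: 0, duration: 30 }, mpdType: 'dynamic' }); // => 30 (rule 2)
 *   getPeriodStart({ attributes: {}, mpdType: 'static' }); // => 0 (rule 3)
 */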
8023 /**
8024 * Traverses the mpd xml tree to generate a list of Representation information objects
8025 * that have inherited attributes from parent nodes
8026 *
8027 * @param {Node} mpd
8028 * The root node of the mpd
8029 * @param {Object} options
8030 * Available options for inheritAttributes
8031 * @param {string} options.manifestUri
8032 * The uri source of the mpd
8033 * @param {number} options.NOW
8034 * Current time per DASH IOP. Default is current time in ms since epoch
8035 * @param {number} options.clientOffset
8036 * Client time difference from NOW (in milliseconds)
8037 * @return {RepresentationInformation[]}
8038 * List of objects containing Representation information
8039 */
8040
8041
8042 var inheritAttributes = function inheritAttributes(mpd, options) {
8043 if (options === void 0) {
8044 options = {};
8045 }
8046
8047 var _options = options,
8048 _options$manifestUri = _options.manifestUri,
8049 manifestUri = _options$manifestUri === void 0 ? '' : _options$manifestUri,
8050 _options$NOW = _options.NOW,
8051 NOW = _options$NOW === void 0 ? Date.now() : _options$NOW,
8052 _options$clientOffset = _options.clientOffset,
8053 clientOffset = _options$clientOffset === void 0 ? 0 : _options$clientOffset;
8054 var periodNodes = findChildren(mpd, 'Period');
8055
8056 if (!periodNodes.length) {
8057 throw new Error(errors.INVALID_NUMBER_OF_PERIOD);
8058 }
8059
8060 var locations = findChildren(mpd, 'Location');
8061 var mpdAttributes = parseAttributes(mpd);
8062 var mpdBaseUrls = buildBaseUrls([manifestUri], findChildren(mpd, 'BaseURL')); // See DASH spec section 5.3.1.2, Semantics of MPD element. Default type to 'static'.
8063
8064 mpdAttributes.type = mpdAttributes.type || 'static';
8065 mpdAttributes.sourceDuration = mpdAttributes.mediaPresentationDuration || 0;
8066 mpdAttributes.NOW = NOW;
8067 mpdAttributes.clientOffset = clientOffset;
8068
8069 if (locations.length) {
8070 mpdAttributes.locations = locations.map(getContent);
8071 }
8072
8073 var periods = []; // Since toAdaptationSets acts on individual periods right now, the simplest approach to
8074 // adding properties that require looking at prior periods is to parse attributes and add
8075 // missing ones before toAdaptationSets is called. If more such properties are added, it
8076 // may be better to refactor toAdaptationSets.
8077
8078 periodNodes.forEach(function (node, index) {
8079 var attributes = parseAttributes(node); // Use the last modified prior period, as it may contain added information necessary
8080 // for this period.
8081
8082 var priorPeriod = periods[index - 1];
8083 attributes.start = getPeriodStart({
8084 attributes: attributes,
8085 priorPeriodAttributes: priorPeriod ? priorPeriod.attributes : null,
8086 mpdType: mpdAttributes.type
8087 });
8088 periods.push({
8089 node: node,
8090 attributes: attributes
8091 });
8092 });
8093 return {
8094 locations: mpdAttributes.locations,
8095 representationInfo: flatten(periods.map(toAdaptationSets(mpdAttributes, mpdBaseUrls)))
8096 };
8097 };
8098
8099 var stringToMpdXml = function stringToMpdXml(manifestString) {
8100 if (manifestString === '') {
8101 throw new Error(errors.DASH_EMPTY_MANIFEST);
8102 }
8103
8104 var parser = new xmldom.DOMParser();
8105 var xml;
8106 var mpd;
8107
8108 try {
8109 xml = parser.parseFromString(manifestString, 'application/xml');
8110 mpd = xml && xml.documentElement.tagName === 'MPD' ? xml.documentElement : null;
8111 } catch (e) {// IE 11 throws on invalid xml
8112 }
8113
8114 if (!mpd || mpd && mpd.getElementsByTagName('parsererror').length > 0) {
8115 throw new Error(errors.DASH_INVALID_XML);
8116 }
8117
8118 return mpd;
8119 };
8120 /**
8121 * Parses the manifest for a UTCTiming node, returning the nodes attributes if found
8122 *
8123 * @param {string} mpd
8124 * XML string of the MPD manifest
8125 * @return {Object|null}
8126 * Attributes of UTCTiming node specified in the manifest. Null if none found
8127 */
8128
8129
8130 var parseUTCTimingScheme = function parseUTCTimingScheme(mpd) {
8131 var UTCTimingNode = findChildren(mpd, 'UTCTiming')[0];
8132
8133 if (!UTCTimingNode) {
8134 return null;
8135 }
8136
8137 var attributes = parseAttributes(UTCTimingNode);
8138
8139 switch (attributes.schemeIdUri) {
8140 case 'urn:mpeg:dash:utc:http-head:2014':
8141 case 'urn:mpeg:dash:utc:http-head:2012':
8142 attributes.method = 'HEAD';
8143 break;
8144
8145 case 'urn:mpeg:dash:utc:http-xsdate:2014':
8146 case 'urn:mpeg:dash:utc:http-iso:2014':
8147 case 'urn:mpeg:dash:utc:http-xsdate:2012':
8148 case 'urn:mpeg:dash:utc:http-iso:2012':
8149 attributes.method = 'GET';
8150 break;
8151
8152 case 'urn:mpeg:dash:utc:direct:2014':
8153 case 'urn:mpeg:dash:utc:direct:2012':
8154 attributes.method = 'DIRECT';
8155 attributes.value = Date.parse(attributes.value);
8156 break;
8157
8158 case 'urn:mpeg:dash:utc:http-ntp:2014':
8159 case 'urn:mpeg:dash:utc:ntp:2014':
8160 case 'urn:mpeg:dash:utc:sntp:2014':
8161 default:
8162 throw new Error(errors.UNSUPPORTED_UTC_TIMING_SCHEME);
8163 }
8164
8165 return attributes;
8166 };
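/*
 * Editor's sketch: for a manifest containing
 *   <UTCTiming schemeIdUri="urn:mpeg:dash:utc:http-xsdate:2014" value="https://time.example.com/now"/>
 * the returned attributes would be
 *   { schemeIdUri: 'urn:mpeg:dash:utc:http-xsdate:2014',
 *     value: 'https://time.example.com/now', method: 'GET' }
 */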
8167 /*
8168 * Given a DASH manifest string and options, parses the DASH manifest into an object in the
8169 * form output by m3u8-parser and accepted by videojs/http-streaming.
8170 *
8171 * For live DASH manifests, if `previousManifest` is provided in options, then the newly
8172 * parsed DASH manifest will have its media sequence and discontinuity sequence values
8173 * updated to reflect its position relative to the prior manifest.
8174 *
8175 * @param {string} manifestString - the DASH manifest as a string
8176 * @param {options} [options] - any options
8177 *
8178 * @return {Object} the manifest object
8179 */
8180
8181 var parse = function parse(manifestString, options) {
8182 if (options === void 0) {
8183 options = {};
8184 }
8185
8186 var parsedManifestInfo = inheritAttributes(stringToMpdXml(manifestString), options);
8187 var playlists = toPlaylists(parsedManifestInfo.representationInfo);
8188 return toM3u8({
8189 dashPlaylists: playlists,
8190 locations: parsedManifestInfo.locations,
8191 sidxMapping: options.sidxMapping,
8192 previousManifest: options.previousManifest
8193 });
8194 };
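/*
 * Editor's usage sketch (option names from the signature above; the mpd
 * string itself is hypothetical):
 *   var manifest = parse(mpdString, { manifestUri: 'https://example.com/dash.mpd' });
 *   // manifest.playlists, manifest.mediaGroups, etc., in m3u8-parser form
 */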
8195 /**
8196 * Parses the manifest for a UTCTiming node, returning the nodes attributes if found
8197 *
8198 * @param {string} manifestString
8199 * XML string of the MPD manifest
8200 * @return {Object|null}
8201 * Attributes of UTCTiming node specified in the manifest. Null if none found
8202 */
8203
8204
8205 var parseUTCTiming = function parseUTCTiming(manifestString) {
8206 return parseUTCTimingScheme(stringToMpdXml(manifestString));
8207 };
8208
8209 var MAX_UINT32 = Math.pow(2, 32);
8210
8211 var getUint64$1 = function getUint64(uint8) {
8212 var dv = new DataView(uint8.buffer, uint8.byteOffset, uint8.byteLength);
8213 var value;
8214
8215 if (dv.getBigUint64) {
8216 value = dv.getBigUint64(0);
8217
8218 if (value < Number.MAX_SAFE_INTEGER) {
8219 return Number(value);
8220 }
8221
8222 return value;
8223 }
8224
8225 return dv.getUint32(0) * MAX_UINT32 + dv.getUint32(4);
8226 };
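/*
 * Editor's example: reading the big-endian 64-bit value
 * [0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00] yields 1 * 2^32 + 0 =
 * 4294967296, returned as a plain number since it is below
 * Number.MAX_SAFE_INTEGER (larger values stay BigInt when supported).
 */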
8227
8228 var numbers = {
8229 getUint64: getUint64$1,
8230 MAX_UINT32: MAX_UINT32
8231 };
8232
8233 var getUint64 = numbers.getUint64;
8234
8235 var parseSidx = function parseSidx(data) {
8236 var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
8237 result = {
8238 version: data[0],
8239 flags: new Uint8Array(data.subarray(1, 4)),
8240 references: [],
8241 referenceId: view.getUint32(4),
8242 timescale: view.getUint32(8)
8243 },
8244 i = 12;
8245
8246 if (result.version === 0) {
8247 result.earliestPresentationTime = view.getUint32(i);
8248 result.firstOffset = view.getUint32(i + 4);
8249 i += 8;
8250 } else {
8251 // read 64 bits
8252 result.earliestPresentationTime = getUint64(data.subarray(i));
8253 result.firstOffset = getUint64(data.subarray(i + 8));
8254 i += 16;
8255 }
8256
8257 i += 2; // reserved
8258
8259 var referenceCount = view.getUint16(i);
8260 i += 2; // start of references
8261
8262 for (; referenceCount > 0; i += 12, referenceCount--) {
8263 result.references.push({
8264 referenceType: (data[i] & 0x80) >>> 7,
8265 referencedSize: view.getUint32(i) & 0x7FFFFFFF,
8266 subsegmentDuration: view.getUint32(i + 4),
8267 startsWithSap: !!(data[i + 8] & 0x80),
8268 sapType: (data[i + 8] & 0x70) >>> 4,
8269 sapDeltaTime: view.getUint32(i + 8) & 0x0FFFFFFF
8270 });
8271 }
8272
8273 return result;
8274 };
8275
8276 var parseSidx_1 = parseSidx;
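/*
 * Editor's note: times in the parsed sidx are in result.timescale units, so a
 * reference with subsegmentDuration 360000 at timescale 90000 describes a
 * 4-second subsegment; referencedSize is the byte length of the referenced
 * subsegment.
 */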
8277
8278 var ID3 = toUint8([0x49, 0x44, 0x33]);
8279 var getId3Size = function getId3Size(bytes, offset) {
8280 if (offset === void 0) {
8281 offset = 0;
8282 }
8283
8284 bytes = toUint8(bytes);
8285 var flags = bytes[offset + 5];
8286 var returnSize = bytes[offset + 6] << 21 | bytes[offset + 7] << 14 | bytes[offset + 8] << 7 | bytes[offset + 9];
8287 var footerPresent = (flags & 16) >> 4;
8288
8289 if (footerPresent) {
8290 return returnSize + 20;
8291 }
8292
8293 return returnSize + 10;
8294 };
8295 var getId3Offset = function getId3Offset(bytes, offset) {
8296 if (offset === void 0) {
8297 offset = 0;
8298 }
8299
8300 bytes = toUint8(bytes);
8301
8302 if (bytes.length - offset < 10 || !bytesMatch(bytes, ID3, {
8303 offset: offset
8304 })) {
8305 return offset;
8306 }
8307
8308 offset += getId3Size(bytes, offset); // recursive check for id3 tags as some files
8309 // have multiple ID3 tag sections even though
8310 // they should not.
8311
8312 return getId3Offset(bytes, offset);
8313 };
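/*
 * Editor's example: for bytes beginning 'I','D','3', flags 0x00 and the
 * synchsafe size bytes [0x00, 0x00, 0x02, 0x01] (2 << 7 | 1 = 257), the tag
 * body is 257 bytes, the header adds 10, and getId3Offset returns 267
 * (assuming no second ID3 section follows).
 */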
8314
8315 var normalizePath$1 = function normalizePath(path) {
8316 if (typeof path === 'string') {
8317 return stringToBytes(path);
8318 }
8319
8320 if (typeof path === 'number') {
8321 return path;
8322 }
8323
8324 return path;
8325 };
8326
8327 var normalizePaths$1 = function normalizePaths(paths) {
8328 if (!Array.isArray(paths)) {
8329 return [normalizePath$1(paths)];
8330 }
8331
8332 return paths.map(function (p) {
8333 return normalizePath$1(p);
8334 });
8335 };
8336 /**
8337 * find any number of boxes by name given a path to it in an iso bmff
8338 * such as mp4.
8339 *
8340 * @param {TypedArray} bytes
8341 * bytes for the iso bmff to search for boxes in
8342 *
8343 * @param {Uint8Array[]|string[]|string|Uint8Array} name
8344 * An array of paths or a single path representing the name
8345 * of boxes to search through in bytes. Paths may be
8346 * uint8 (character codes) or strings.
8347 *
8348 * @param {boolean} [complete=false]
8349 * Should we search only for complete boxes on the final path.
8350 * This is very useful when you do not want to get back partial boxes
8351 * in the case of streaming files.
8352 *
8353 * @return {Uint8Array[]}
8354 * An array of the end paths that we found.
8355 */
8356
8357 var findBox = function findBox(bytes, paths, complete) {
8358 if (complete === void 0) {
8359 complete = false;
8360 }
8361
8362 paths = normalizePaths$1(paths);
8363 bytes = toUint8(bytes);
8364 var results = [];
8365
8366 if (!paths.length) {
8367 // short-circuit the search for empty paths
8368 return results;
8369 }
8370
8371 var i = 0;
8372
8373 while (i < bytes.length) {
8374 var size = (bytes[i] << 24 | bytes[i + 1] << 16 | bytes[i + 2] << 8 | bytes[i + 3]) >>> 0;
8375 var type = bytes.subarray(i + 4, i + 8); // invalid box format.
8376
8377 if (size === 0) {
8378 break;
8379 }
8380
8381 var end = i + size;
8382
8383 if (end > bytes.length) {
8384 // this box is bigger than the number of bytes we have
8385 // and complete is set, we cannot find any more boxes.
8386 if (complete) {
8387 break;
8388 }
8389
8390 end = bytes.length;
8391 }
8392
8393 var data = bytes.subarray(i + 8, end);
8394
8395 if (bytesMatch(type, paths[0])) {
8396 if (paths.length === 1) {
8397 // this is the end of the path and we've found the box we were
8398 // looking for
8399 results.push(data);
8400 } else {
8401 // recursively search for the next box along the path
8402 results.push.apply(results, findBox(data, paths.slice(1), complete));
8403 }
8404 }
8405
8406 i = end;
8407 } // we've finished searching all of bytes
8408
8409
8410 return results;
8411 };
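/*
 * Editor's usage sketch: to pull every traf payload out of an fmp4 fragment
 * (bytes are assumed to be a Uint8Array of the fragment):
 *   var trafs = findBox(bytes, ['moof', 'traf']);
 * Each entry is the box payload with the 8-byte size/type header stripped.
 */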
8412
8413 // https://matroska-org.github.io/libebml/specs.html
8414 // https://www.matroska.org/technical/elements.html
8415 // https://www.webmproject.org/docs/container/
8416
8417 var EBML_TAGS = {
8418 EBML: toUint8([0x1A, 0x45, 0xDF, 0xA3]),
8419 DocType: toUint8([0x42, 0x82]),
8420 Segment: toUint8([0x18, 0x53, 0x80, 0x67]),
8421 SegmentInfo: toUint8([0x15, 0x49, 0xA9, 0x66]),
8422 Tracks: toUint8([0x16, 0x54, 0xAE, 0x6B]),
8423 Track: toUint8([0xAE]),
8424 TrackNumber: toUint8([0xd7]),
8425 DefaultDuration: toUint8([0x23, 0xe3, 0x83]),
8426 TrackEntry: toUint8([0xAE]),
8427 TrackType: toUint8([0x83]),
8428 FlagDefault: toUint8([0x88]),
8429 CodecID: toUint8([0x86]),
8430 CodecPrivate: toUint8([0x63, 0xA2]),
8431 VideoTrack: toUint8([0xe0]),
8432 AudioTrack: toUint8([0xe1]),
8433 // Not used yet, but will be used for live webm/mkv
8434 // see https://www.matroska.org/technical/basics.html#block-structure
8435 // see https://www.matroska.org/technical/basics.html#simpleblock-structure
8436 Cluster: toUint8([0x1F, 0x43, 0xB6, 0x75]),
8437 Timestamp: toUint8([0xE7]),
8438 TimestampScale: toUint8([0x2A, 0xD7, 0xB1]),
8439 BlockGroup: toUint8([0xA0]),
8440 BlockDuration: toUint8([0x9B]),
8441 Block: toUint8([0xA1]),
8442 SimpleBlock: toUint8([0xA3])
8443 };
8444 /**
8445 * This is a simple table to determine the length
8446 * of things in ebml. The length is one based (starts at 1,
8447 * rather than zero) and for every zero bit before a one bit
8448 * we add one to length. We also need this table because in some
8449 * cases we have to xor all the length bits from another value.
8450 */
8451
8452 var LENGTH_TABLE = [128, 64, 32, 16, 8, 4, 2, 1];
8453
8454 var getLength = function getLength(byte) {
8455 var len = 1;
8456
8457 for (var i = 0; i < LENGTH_TABLE.length; i++) {
8458 if (byte & LENGTH_TABLE[i]) {
8459 break;
8460 }
8461
8462 len++;
8463 }
8464
8465 return len;
8466 }; // length in ebml is stored in the first 4 to 8 bits
8467 // of the first byte. 4 for the id length and 8 for the
8468 // data size length. Length is measured by converting the number to binary
8469 // then 1 + the number of zeros before a 1 is encountered starting
8470 // from the left.
8471
8472
8473 var getvint = function getvint(bytes, offset, removeLength, signed) {
8474 if (removeLength === void 0) {
8475 removeLength = true;
8476 }
8477
8478 if (signed === void 0) {
8479 signed = false;
8480 }
8481
8482 var length = getLength(bytes[offset]);
8483 var valueBytes = bytes.subarray(offset, offset + length); // NOTE that in the removeLength branch below we do **not** subarray, because we
8484 // need a copy of these bytes: they are modified to remove the dataSizeLen bits
8485 // and we do not want to modify the original data. Normally we could just call
8486 // slice on a uint8array, but IE 11 does not support that...
8487
8488 if (removeLength) {
8489 valueBytes = Array.prototype.slice.call(bytes, offset, offset + length);
8490 valueBytes[0] ^= LENGTH_TABLE[length - 1];
8491 }
8492
8493 return {
8494 length: length,
8495 value: bytesToNumber(valueBytes, {
8496 signed: signed
8497 }),
8498 bytes: valueBytes
8499 };
8500 };
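/*
 * Editor's examples (illustrative bytes): getvint(toUint8([0x81]), 0) reads a
 * 1-byte vint, xors away the 0x80 length marker and returns
 * { length: 1, value: 1 }. getvint(toUint8([0x40, 0x02]), 0) is a 2-byte vint
 * and returns { length: 2, value: 2 }.
 */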
8501
8502 var normalizePath = function normalizePath(path) {
8503 if (typeof path === 'string') {
8504 return path.match(/.{1,2}/g).map(function (p) {
8505 return normalizePath(p);
8506 });
8507 }
8508
8509 if (typeof path === 'number') {
8510 return numberToBytes(path);
8511 }
8512
8513 return path;
8514 };
8515
8516 var normalizePaths = function normalizePaths(paths) {
8517 if (!Array.isArray(paths)) {
8518 return [normalizePath(paths)];
8519 }
8520
8521 return paths.map(function (p) {
8522 return normalizePath(p);
8523 });
8524 };
8525
8526 var getInfinityDataSize = function getInfinityDataSize(id, bytes, offset) {
8527 if (offset >= bytes.length) {
8528 return bytes.length;
8529 }
8530
8531 var innerid = getvint(bytes, offset, false);
8532
8533 if (bytesMatch(id.bytes, innerid.bytes)) {
8534 return offset;
8535 }
8536
8537 var dataHeader = getvint(bytes, offset + innerid.length);
8538 return getInfinityDataSize(id, bytes, offset + dataHeader.length + dataHeader.value + innerid.length);
8539 };
8540 /**
8541 * Notes on the EBML format.
8542 *
8543 * EBML uses "vint" tags. Every vint tag contains
8544 * two parts
8545 *
8546 * 1. The length from the first byte. You get this by
8547 * converting the byte to binary and counting the zeros
8548 * before a 1. Then you add 1 to that. Examples
8549 * 00011111 = length 4 because there are 3 zeros before a 1.
8550 * 00100000 = length 3 because there are 2 zeros before a 1.
8551 * 00000011 = length 7 because there are 6 zeros before a 1.
8552 *
8553 * 2. The bits used for length are removed from the first byte
8554 * Then all the bytes are merged into a value. NOTE: this
8555 * is not the case for id ebml tags as their id includes
8556 * length bits.
8557 *
8558 */
8559
8560
8561 var findEbml = function findEbml(bytes, paths) {
8562 paths = normalizePaths(paths);
8563 bytes = toUint8(bytes);
8564 var results = [];
8565
8566 if (!paths.length) {
8567 return results;
8568 }
8569
8570 var i = 0;
8571
8572 while (i < bytes.length) {
8573 var id = getvint(bytes, i, false);
8574 var dataHeader = getvint(bytes, i + id.length);
8575 var dataStart = i + id.length + dataHeader.length; // dataSize is unknown or this is a live stream
8576
8577 if (dataHeader.value === 0x7f) {
8578 dataHeader.value = getInfinityDataSize(id, bytes, dataStart);
8579
8580 if (dataHeader.value !== bytes.length) {
8581 dataHeader.value -= dataStart;
8582 }
8583 }
8584
8585 var dataEnd = dataStart + dataHeader.value > bytes.length ? bytes.length : dataStart + dataHeader.value;
8586 var data = bytes.subarray(dataStart, dataEnd);
8587
8588 if (bytesMatch(paths[0], id.bytes)) {
8589 if (paths.length === 1) {
8590 // this is the end of the paths and we've found the tag we were
8591 // looking for
8592 results.push(data);
8593 } else {
8594 // recursively search for the next tag inside of the data
8595 // of this one
8596 results = results.concat(findEbml(data, paths.slice(1)));
8597 }
8598 }
8599
8600 var totalLength = id.length + dataHeader.length + data.length; // move past this tag entirely, we are not looking for it
8601
8602 i += totalLength;
8603 }
8604
8605 return results;
8606 }; // see https://www.matroska.org/technical/basics.html#block-structure
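/*
 * Editor's usage sketch: to list the TrackEntry payloads of a webm/mkv file
 * (bytes assumed to be the file's Uint8Array):
 *   var tracks = findEbml(bytes, [EBML_TAGS.Segment, EBML_TAGS.Tracks, EBML_TAGS.TrackEntry]);
 * Each result is the tag's data with the id and size headers removed.
 */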
8607
8608 var NAL_TYPE_ONE = toUint8([0x00, 0x00, 0x00, 0x01]);
8609 var NAL_TYPE_TWO = toUint8([0x00, 0x00, 0x01]);
8610 var EMULATION_PREVENTION = toUint8([0x00, 0x00, 0x03]);
8611 /**
8612 * Expunge any "Emulation Prevention" bytes from a "Raw Byte
8613 * Sequence Payload"
8614 *
8615 * @param data {Uint8Array} the bytes of a RBSP from a NAL
8616 * unit
8617 * @return {Uint8Array} the RBSP without any Emulation
8618 * Prevention Bytes
8619 */
8620
8621 var discardEmulationPreventionBytes = function discardEmulationPreventionBytes(bytes) {
8622 var positions = [];
8623 var i = 1; // Find all `Emulation Prevention Bytes`
8624
8625 while (i < bytes.length - 2) {
8626 if (bytesMatch(bytes.subarray(i, i + 3), EMULATION_PREVENTION)) {
8627 positions.push(i + 2);
8628 i++;
8629 }
8630
8631 i++;
8632 } // If no Emulation Prevention Bytes were found just return the original
8633 // array
8634
8635
8636 if (positions.length === 0) {
8637 return bytes;
8638 } // Create a new array to hold the NAL unit data
8639
8640
8641 var newLength = bytes.length - positions.length;
8642 var newData = new Uint8Array(newLength);
8643 var sourceIndex = 0;
8644
8645 for (i = 0; i < newLength; sourceIndex++, i++) {
8646 if (sourceIndex === positions[0]) {
8647 // Skip this byte
8648 sourceIndex++; // Remove this position index
8649
8650 positions.shift();
8651 }
8652
8653 newData[i] = bytes[sourceIndex];
8654 }
8655
8656 return newData;
8657 };
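/*
 * Editor's example: in the RBSP [0x67, 0x00, 0x00, 0x03, 0x01] the 0x03 is an
 * emulation prevention byte, so the function returns [0x67, 0x00, 0x00, 0x01].
 * (The scan starts at index 1 because index 0 is the NAL header byte.)
 */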
8658 var findNal = function findNal(bytes, dataType, types, nalLimit) {
8659 if (nalLimit === void 0) {
8660 nalLimit = Infinity;
8661 }
8662
8663 bytes = toUint8(bytes);
8664 types = [].concat(types);
8665 var i = 0;
8666 var nalStart;
8667 var nalsFound = 0; // keep searching until:
8668 // we reach the end of bytes
8669 // we reach the maximum number of nals they want to search.
8670 // NOTE: we disregard nalLimit once we have found the start
8671 // of the nal we want so that we can find the end of the nal we want.
8672
8673 while (i < bytes.length && (nalsFound < nalLimit || nalStart)) {
8674 var nalOffset = void 0;
8675
8676 if (bytesMatch(bytes.subarray(i), NAL_TYPE_ONE)) {
8677 nalOffset = 4;
8678 } else if (bytesMatch(bytes.subarray(i), NAL_TYPE_TWO)) {
8679 nalOffset = 3;
8680 } // we are unsynced,
8681 // find the next nal unit
8682
8683
8684 if (!nalOffset) {
8685 i++;
8686 continue;
8687 }
8688
8689 nalsFound++;
8690
8691 if (nalStart) {
8692 return discardEmulationPreventionBytes(bytes.subarray(nalStart, i));
8693 }
8694
8695 var nalType = void 0;
8696
8697 if (dataType === 'h264') {
8698 nalType = bytes[i + nalOffset] & 0x1f;
8699 } else if (dataType === 'h265') {
8700 nalType = bytes[i + nalOffset] >> 1 & 0x3f;
8701 }
8702
8703 if (types.indexOf(nalType) !== -1) {
8704 nalStart = i + nalOffset;
8705 } // nal header is 1 length for h264, and 2 for h265
8706
8707
8708 i += nalOffset + (dataType === 'h264' ? 1 : 2);
8709 }
8710
8711 return bytes.subarray(0, 0);
8712 };
8713 var findH264Nal = function findH264Nal(bytes, type, nalLimit) {
8714 return findNal(bytes, 'h264', type, nalLimit);
8715 };
8716 var findH265Nal = function findH265Nal(bytes, type, nalLimit) {
8717 return findNal(bytes, 'h265', type, nalLimit);
8718 };
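/**
 * Usage sketch, mirroring the checks further below (annexBBytes is a
 * placeholder for Annex B formatted h264 data): look for a
 * seq_parameter_set_rbsp (nal type 7) within the first few nal units.
 *
 *   var sps = findH264Nal(annexBBytes, 7, 3);
 *   // sps.length > 0 when an SPS nal was found
 */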
8719
8720 var CONSTANTS = {
8721 // "webm" string literal in hex
8722 'webm': toUint8([0x77, 0x65, 0x62, 0x6d]),
8723 // "matroska" string literal in hex
8724 'matroska': toUint8([0x6d, 0x61, 0x74, 0x72, 0x6f, 0x73, 0x6b, 0x61]),
8725 // "fLaC" string literal in hex
8726 'flac': toUint8([0x66, 0x4c, 0x61, 0x43]),
8727 // "OggS" string literal in hex
8728 'ogg': toUint8([0x4f, 0x67, 0x67, 0x53]),
8729 // ac-3 sync byte, also works for ec-3 (enhanced ac-3), which is an
8730 // extension of ac-3 and shares the same sync word
8731 'ac3': toUint8([0x0b, 0x77]),
8732 // "RIFF" string literal in hex used for wav and avi
8733 'riff': toUint8([0x52, 0x49, 0x46, 0x46]),
8734 // "AVI" string literal in hex
8735 'avi': toUint8([0x41, 0x56, 0x49]),
8736 // "WAVE" string literal in hex
8737 'wav': toUint8([0x57, 0x41, 0x56, 0x45]),
8738 // "ftyp3g" string literal in hex
8739 '3gp': toUint8([0x66, 0x74, 0x79, 0x70, 0x33, 0x67]),
8740 // "ftyp" string literal in hex
8741 'mp4': toUint8([0x66, 0x74, 0x79, 0x70]),
8742 // "styp" string literal in hex
8743 'fmp4': toUint8([0x73, 0x74, 0x79, 0x70]),
8744 // "ftypqt" string literal in hex
8745 'mov': toUint8([0x66, 0x74, 0x79, 0x70, 0x71, 0x74]),
8746 // moov string literal in hex
8747 'moov': toUint8([0x6D, 0x6F, 0x6F, 0x76]),
8748 // moof string literal in hex
8749 'moof': toUint8([0x6D, 0x6F, 0x6F, 0x66])
8750 };
8751 var _isLikely = {
8752 aac: function aac(bytes) {
8753 var offset = getId3Offset(bytes);
8754 return bytesMatch(bytes, [0xFF, 0x10], {
8755 offset: offset,
8756 mask: [0xFF, 0x16]
8757 });
8758 },
8759 mp3: function mp3(bytes) {
8760 var offset = getId3Offset(bytes);
8761 return bytesMatch(bytes, [0xFF, 0x02], {
8762 offset: offset,
8763 mask: [0xFF, 0x06]
8764 });
8765 },
8766 webm: function webm(bytes) {
8767 var docType = findEbml(bytes, [EBML_TAGS.EBML, EBML_TAGS.DocType])[0]; // check if DocType EBML tag is webm
8768
8769 return bytesMatch(docType, CONSTANTS.webm);
8770 },
8771 mkv: function mkv(bytes) {
8772 var docType = findEbml(bytes, [EBML_TAGS.EBML, EBML_TAGS.DocType])[0]; // check if DocType EBML tag is matroska
8773
8774 return bytesMatch(docType, CONSTANTS.matroska);
8775 },
8776 mp4: function mp4(bytes) {
8777 // if this file is another base media file format, it is not mp4
8778 if (_isLikely['3gp'](bytes) || _isLikely.mov(bytes)) {
8779 return false;
8780 } // if this file starts with a ftyp or styp box, it's mp4
8781
8782
8783 if (bytesMatch(bytes, CONSTANTS.mp4, {
8784 offset: 4
8785 }) || bytesMatch(bytes, CONSTANTS.fmp4, {
8786 offset: 4
8787 })) {
8788 return true;
8789 } // if this file starts with a moof/moov box, it's mp4
8790
8791
8792 if (bytesMatch(bytes, CONSTANTS.moof, {
8793 offset: 4
8794 }) || bytesMatch(bytes, CONSTANTS.moov, {
8795 offset: 4
8796 })) {
8797 return true;
8798 }
8799 },
8800 mov: function mov(bytes) {
8801 return bytesMatch(bytes, CONSTANTS.mov, {
8802 offset: 4
8803 });
8804 },
8805 '3gp': function gp(bytes) {
8806 return bytesMatch(bytes, CONSTANTS['3gp'], {
8807 offset: 4
8808 });
8809 },
8810 ac3: function ac3(bytes) {
8811 var offset = getId3Offset(bytes);
8812 return bytesMatch(bytes, CONSTANTS.ac3, {
8813 offset: offset
8814 });
8815 },
8816 ts: function ts(bytes) {
8817 if (bytes.length < 189 && bytes.length >= 1) {
8818 return bytes[0] === 0x47;
8819 }
8820
8821 var i = 0; // check the first 376 bytes for two matching sync bytes
8822
8823 while (i + 188 < bytes.length && i < 188) {
8824 if (bytes[i] === 0x47 && bytes[i + 188] === 0x47) {
8825 return true;
8826 }
8827
8828 i += 1;
8829 }
8830
8831 return false;
8832 },
8833 flac: function flac(bytes) {
8834 var offset = getId3Offset(bytes);
8835 return bytesMatch(bytes, CONSTANTS.flac, {
8836 offset: offset
8837 });
8838 },
8839 ogg: function ogg(bytes) {
8840 return bytesMatch(bytes, CONSTANTS.ogg);
8841 },
8842 avi: function avi(bytes) {
8843 return bytesMatch(bytes, CONSTANTS.riff) && bytesMatch(bytes, CONSTANTS.avi, {
8844 offset: 8
8845 });
8846 },
8847 wav: function wav(bytes) {
8848 return bytesMatch(bytes, CONSTANTS.riff) && bytesMatch(bytes, CONSTANTS.wav, {
8849 offset: 8
8850 });
8851 },
8852 'h264': function h264(bytes) {
8853 // find seq_parameter_set_rbsp
8854 return findH264Nal(bytes, 7, 3).length;
8855 },
8856 'h265': function h265(bytes) {
8857 // find video_parameter_set_rbsp or seq_parameter_set_rbsp
8858 return findH265Nal(bytes, [32, 33], 3).length;
8859 }
8860 }; // get all the isLikely functions
8861 // but make sure 'ts' is above h264 and h265
8862 // and below everything else, as those checks are the least specific
8863
8864 var isLikelyTypes = Object.keys(_isLikely) // remove ts, h264, h265
8865 .filter(function (t) {
8866 return t !== 'ts' && t !== 'h264' && t !== 'h265';
8867 }) // add it back to the bottom
8868 .concat(['ts', 'h264', 'h265']); // make sure we are dealing with uint8 data.
8869
8870 isLikelyTypes.forEach(function (type) {
8871 var isLikelyFn = _isLikely[type];
8872
8873 _isLikely[type] = function (bytes) {
8874 return isLikelyFn(toUint8(bytes));
8875 };
8876 }); // export after wrapping
8877
8878 var isLikely = _isLikely; // A useful list of file signatures can be found here
8879 // https://en.wikipedia.org/wiki/List_of_file_signatures
8880
8881 var detectContainerForBytes = function detectContainerForBytes(bytes) {
8882 bytes = toUint8(bytes);
8883
8884 for (var i = 0; i < isLikelyTypes.length; i++) {
8885 var type = isLikelyTypes[i];
8886
8887 if (isLikely[type](bytes)) {
8888 return type;
8889 }
8890 }
8891
8892 return '';
8893 }; // fmp4 is not a container
8894
8895 var isLikelyFmp4MediaSegment = function isLikelyFmp4MediaSegment(bytes) {
8896 return findBox(bytes, ['moof']).length > 0;
8897 };
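/**
 * Usage sketch: probe the first bytes of a file for its container type.
 * The functions accept anything toUint8 understands, including plain
 * arrays.
 *
 *   // 00 00 00 18 followed by "ftyp" is the start of a typical mp4 file
 *   detectContainerForBytes([0x00, 0x00, 0x00, 0x18, 0x66, 0x74, 0x79, 0x70]);
 *   // returns 'mp4'
 *
 *   detectContainerForBytes([]); // returns '' (nothing recognized)
 */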
8898
8899 // invoke the callback only once the request is complete (readyState === 4).
8900
8901 var callbackOnCompleted = function callbackOnCompleted(request, cb) {
8902 if (request.readyState === 4) {
8903 return cb();
8904 }
8905
8906 return;
8907 };
8908
8909 var containerRequest = function containerRequest(uri, xhr, cb) {
8910 var bytes = [];
8911 var id3Offset;
8912 var finished = false;
8913
8914 var endRequestAndCallback = function endRequestAndCallback(err, req, type, _bytes) {
8915 req.abort();
8916 finished = true;
8917 return cb(err, req, type, _bytes);
8918 };
8919
8920 var progressListener = function progressListener(error, request) {
8921 if (finished) {
8922 return;
8923 }
8924
8925 if (error) {
8926 return endRequestAndCallback(error, request, '', bytes);
8927 } // grab the new part of content that was just downloaded
8928
8929
8930 var newPart = request.responseText.substring(bytes && bytes.byteLength || 0, request.responseText.length); // add that onto bytes
8931
8932 bytes = concatTypedArrays(bytes, stringToBytes(newPart, true));
8933 id3Offset = id3Offset || getId3Offset(bytes); // we need at least 10 bytes to determine a type
8934 // or we need at least two bytes after an id3Offset
8935
8936 if (bytes.length < 10 || id3Offset && bytes.length < id3Offset + 2) {
8937 return callbackOnCompleted(request, function () {
8938 return endRequestAndCallback(error, request, '', bytes);
8939 });
8940 }
8941
8942 var type = detectContainerForBytes(bytes); // if this looks like a ts segment but we don't have enough data
8943 // to see the second sync byte, wait until we have enough data
8944 // before declaring it ts
8945
8946 if (type === 'ts' && bytes.length < 188) {
8947 return callbackOnCompleted(request, function () {
8948 return endRequestAndCallback(error, request, '', bytes);
8949 });
8950 } // this may be an unsynced ts segment
8951 // wait for 376 bytes before detecting no container
8952
8953
8954 if (!type && bytes.length < 376) {
8955 return callbackOnCompleted(request, function () {
8956 return endRequestAndCallback(error, request, '', bytes);
8957 });
8958 }
8959
8960 return endRequestAndCallback(null, request, type, bytes);
8961 };
8962
8963 var options = {
8964 uri: uri,
8965 beforeSend: function beforeSend(request) {
8966 // this forces the browser to pass the bytes to us unprocessed
8967 request.overrideMimeType('text/plain; charset=x-user-defined');
8968 request.addEventListener('progress', function (_ref) {
8969 _ref.total; // unused; leftover from transpiling a destructured
8970 _ref.loaded; // ({ total, loaded }) progress handler signature
8971 return callbackWrapper(request, null, {
8972 statusCode: request.status
8973 }, progressListener);
8974 });
8975 }
8976 };
8977 var request = xhr(options, function (error, response) {
8978 return callbackWrapper(request, error, response, progressListener);
8979 });
8980 return request;
8981 };
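/**
 * Usage sketch (with hypothetical values; the loader below passes
 * this.vhs_.xhr): probe a segment URL and get back the detected
 * container type along with the bytes downloaded so far. The request is
 * aborted as soon as a type can be determined.
 *
 *   containerRequest('https://example.com/init.mp4', vhs.xhr, function (err, req, type, bytes) {
 *     // type is e.g. 'mp4', 'ts', or '' when detection failed
 *   });
 */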
8982
8983 var EventTarget = videojs__default["default"].EventTarget,
8984 mergeOptions = videojs__default["default"].mergeOptions;
8985
8986 var dashPlaylistUnchanged = function dashPlaylistUnchanged(a, b) {
8987 if (!isPlaylistUnchanged(a, b)) {
8988 return false;
8989 } // for dash, the check above will often return true in scenarios where
8990 // the playlist actually has changed, because mediaSequence isn't a
8991 // dash concept and we often set it to 1, so playlists with the same number
8992 // of segments compare as unchanged.
8993 // For dash we therefore need to make sure the underlying segments haven't changed.
8994 // If the sidx changed, the playlists are different.
8995
8996
8997 if (a.sidx && b.sidx && (a.sidx.offset !== b.sidx.offset || a.sidx.length !== b.sidx.length)) {
8998 return false;
8999 } else if (!a.sidx && b.sidx || a.sidx && !b.sidx) {
9000 return false;
9001 } // if one or the other does not have segments,
9002 // there was a change.
9003
9004
9005 if (a.segments && !b.segments || !a.segments && b.segments) {
9006 return false;
9007 } // neither has segments, so nothing changed
9008
9009
9010 if (!a.segments && !b.segments) {
9011 return true;
9012 } // check segments themselves
9013
9014
9015 for (var i = 0; i < a.segments.length; i++) {
9016 var aSegment = a.segments[i];
9017 var bSegment = b.segments[i]; // if uris are different between segments there was a change
9018
9019 if (aSegment.uri !== bSegment.uri) {
9020 return false;
9021 } // neither segment has a byterange, there will be no byterange change.
9022
9023
9024 if (!aSegment.byterange && !bSegment.byterange) {
9025 continue;
9026 }
9027
9028 var aByterange = aSegment.byterange;
9029 var bByterange = bSegment.byterange; // if byterange only exists on one of the segments, there was a change.
9030
9031 if (aByterange && !bByterange || !aByterange && bByterange) {
9032 return false;
9033 } // if both segments have byterange with different offsets, there was a change.
9034
9035
9036 if (aByterange.offset !== bByterange.offset || aByterange.length !== bByterange.length) {
9037 return false;
9038 }
9039 } // if everything was the same with segments, this is the same playlist.
9040
9041
9042 return true;
9043 };
9044 /**
9045 * Use the representation IDs from the mpd object to create groupIDs; the NAME is set to the mandatory representation
9046 * ID in the parser. This allows for continuous playout across periods with the same representation IDs
9047 * (continuous periods as defined in DASH-IF 3.2.12). This is assumed in the mpd-parser as well. If we want to support
9048 * periods without continuous playback this function may need modification as well as the parser.
9049 */
9050
9051
9052 var dashGroupId = function dashGroupId(type, group, label, playlist) {
9053 // If the manifest somehow does not have an ID (non-dash compliant), use the label.
9054 var playlistId = playlist.attributes.NAME || label;
9055 return "placeholder-uri-" + type + "-" + group + "-" + playlistId;
9056 };
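/**
 * For example (illustrative values):
 *
 *   dashGroupId('audio', 'main', 'en', { attributes: { NAME: '3' } });
 *   // returns 'placeholder-uri-audio-main-3'
 */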
9057 /**
9058 * Parses the master XML string and updates playlist URI references.
9059 *
9060 * @param {Object} config
9061 * Object of arguments
9062 * @param {string} config.masterXml
9063 * The mpd XML
9064 * @param {string} config.srcUrl
9065 * The mpd URL
9066 * @param {number} config.clientOffset
9067 * A time difference between server and client
9068 * @param {Object} config.sidxMapping
9069 * SIDX mappings for moof/mdat URIs and byte ranges
9070 * @return {Object}
9071 * The parsed mpd manifest object
9072 */
9073
9074
9075 var parseMasterXml = function parseMasterXml(_ref) {
9076 var masterXml = _ref.masterXml,
9077 srcUrl = _ref.srcUrl,
9078 clientOffset = _ref.clientOffset,
9079 sidxMapping = _ref.sidxMapping,
9080 previousManifest = _ref.previousManifest;
9081 var manifest = parse(masterXml, {
9082 manifestUri: srcUrl,
9083 clientOffset: clientOffset,
9084 sidxMapping: sidxMapping,
9085 previousManifest: previousManifest
9086 });
9087 addPropertiesToMaster(manifest, srcUrl, dashGroupId);
9088 return manifest;
9089 };
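/**
 * A minimal sketch of how the loader below uses parseMasterXml
 * (mpdString and the URL are hypothetical stand-ins):
 *
 *   var manifest = parseMasterXml({
 *     masterXml: mpdString, // the raw MPD XML text
 *     srcUrl: 'https://example.com/dash.mpd',
 *     clientOffset: 0, // server/client clock difference in ms
 *     sidxMapping: {}
 *   });
 *   // manifest.playlists now carries the resolved, vhs-style playlists
 */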
9090 /**
9091 * Removes any mediaGroup labels that no longer exist in the newMaster
9092 *
9093 * @param {Object} update
9094 * The previous mpd object being updated
9095 * @param {Object} newMaster
9096 * The new mpd object
9097 */
9098
9099 var removeOldMediaGroupLabels = function removeOldMediaGroupLabels(update, newMaster) {
9100 forEachMediaGroup$1(update, function (properties, type, group, label) {
9101 if (!(label in newMaster.mediaGroups[type][group])) {
9102 delete update.mediaGroups[type][group][label];
9103 }
9104 });
9105 };
9106 /**
9107 * Returns a new master manifest that is the result of merging an updated master manifest
9108 * into the original version.
9109 *
9110 * @param {Object} oldMaster
9111 * The old parsed mpd object
9112 * @param {Object} newMaster
9113 * The updated parsed mpd object
9114 * @return {Object}
9115 * A new object representing the original master manifest with the updated media
9116 * playlists merged in
9117 */
9118
9119
9120 var updateMaster = function updateMaster(oldMaster, newMaster, sidxMapping) {
9121 var noChanges = true;
9122 var update = mergeOptions(oldMaster, {
9123 // These are top level properties that can be updated
9124 duration: newMaster.duration,
9125 minimumUpdatePeriod: newMaster.minimumUpdatePeriod,
9126 timelineStarts: newMaster.timelineStarts
9127 }); // First update the playlists in playlist list
9128
9129 for (var i = 0; i < newMaster.playlists.length; i++) {
9130 var playlist = newMaster.playlists[i];
9131
9132 if (playlist.sidx) {
9133 var sidxKey = generateSidxKey(playlist.sidx); // add sidx segments to the playlist if we have all the sidx info already
9134
9135 if (sidxMapping && sidxMapping[sidxKey] && sidxMapping[sidxKey].sidx) {
9136 addSidxSegmentsToPlaylist$1(playlist, sidxMapping[sidxKey].sidx, playlist.sidx.resolvedUri);
9137 }
9138 }
9139
9140 var playlistUpdate = updateMaster$1(update, playlist, dashPlaylistUnchanged);
9141
9142 if (playlistUpdate) {
9143 update = playlistUpdate;
9144 noChanges = false;
9145 }
9146 } // Then update media group playlists
9147
9148
9149 forEachMediaGroup$1(newMaster, function (properties, type, group, label) {
9150 if (properties.playlists && properties.playlists.length) {
9151 var id = properties.playlists[0].id;
9152
9153 var _playlistUpdate = updateMaster$1(update, properties.playlists[0], dashPlaylistUnchanged);
9154
9155 if (_playlistUpdate) {
9156 update = _playlistUpdate; // add new mediaGroup label if it doesn't exist and assign the new mediaGroup.
9157
9158 if (!(label in update.mediaGroups[type][group])) {
9159 update.mediaGroups[type][group][label] = properties;
9160 } // update the playlist reference within media groups
9161
9162
9163 update.mediaGroups[type][group][label].playlists[0] = update.playlists[id];
9164 noChanges = false;
9165 }
9166 }
9167 }); // remove mediaGroup labels and references that no longer exist in the newMaster
9168
9169 removeOldMediaGroupLabels(update, newMaster);
9170
9171 if (newMaster.minimumUpdatePeriod !== oldMaster.minimumUpdatePeriod) {
9172 noChanges = false;
9173 }
9174
9175 if (noChanges) {
9176 return null;
9177 }
9178
9179 return update;
9180 }; // SIDX should be equivalent if the URI and byteranges of the SIDX match.
9181 // If the SIDXs have maps, the two maps should match,
9182 // both `a` and `b` missing SIDXs is considered matching.
9183 // If `a` or `b` but not both have a map, they aren't matching.
9184
9185 var equivalentSidx = function equivalentSidx(a, b) {
9186 var neitherMap = Boolean(!a.map && !b.map);
9187 var equivalentMap = neitherMap || Boolean(a.map && b.map && a.map.byterange.offset === b.map.byterange.offset && a.map.byterange.length === b.map.byterange.length);
9188 return equivalentMap && a.uri === b.uri && a.byterange.offset === b.byterange.offset && a.byterange.length === b.byterange.length;
9189 }; // exported for testing
9190
9191
9192 var compareSidxEntry = function compareSidxEntry(playlists, oldSidxMapping) {
9193 var newSidxMapping = {};
9194
9195 for (var id in playlists) {
9196 var playlist = playlists[id];
9197 var currentSidxInfo = playlist.sidx;
9198
9199 if (currentSidxInfo) {
9200 var key = generateSidxKey(currentSidxInfo);
9201
9202 if (!oldSidxMapping[key]) {
9203 break;
9204 }
9205
9206 var savedSidxInfo = oldSidxMapping[key].sidxInfo;
9207
9208 if (equivalentSidx(savedSidxInfo, currentSidxInfo)) {
9209 newSidxMapping[key] = oldSidxMapping[key];
9210 }
9211 }
9212 }
9213
9214 return newSidxMapping;
9215 };
9216 /**
9217 * A function that filters out changed items as they need to be requested separately.
9218 *
9219 * The method is exported for testing
9220 *
9221 * @param {Object} master the parsed mpd XML returned via mpd-parser
9222 * @param {Object} oldSidxMapping the SIDX to compare against
9223 */
9224
9225 var filterChangedSidxMappings = function filterChangedSidxMappings(master, oldSidxMapping) {
9226 var videoSidx = compareSidxEntry(master.playlists, oldSidxMapping);
9227 var mediaGroupSidx = videoSidx;
9228 forEachMediaGroup$1(master, function (properties, mediaType, groupKey, labelKey) {
9229 if (properties.playlists && properties.playlists.length) {
9230 var playlists = properties.playlists;
9231 mediaGroupSidx = mergeOptions(mediaGroupSidx, compareSidxEntry(playlists, oldSidxMapping));
9232 }
9233 });
9234 return mediaGroupSidx;
9235 };
9236
9237 var DashPlaylistLoader = /*#__PURE__*/function (_EventTarget) {
9238 inheritsLoose(DashPlaylistLoader, _EventTarget);
9239
9240 // DashPlaylistLoader must accept either a src url or a playlist because subsequent
9241 // playlist loader setups from media groups will expect to be able to pass a playlist
9242 // (since there aren't external URLs to media playlists with DASH)
9243 function DashPlaylistLoader(srcUrlOrPlaylist, vhs, options, masterPlaylistLoader) {
9244 var _this;
9245
9246 if (options === void 0) {
9247 options = {};
9248 }
9249
9250 _this = _EventTarget.call(this) || this;
9251 _this.masterPlaylistLoader_ = masterPlaylistLoader || assertThisInitialized(_this);
9252
9253 if (!masterPlaylistLoader) {
9254 _this.isMaster_ = true;
9255 }
9256
9257 var _options = options,
9258 _options$withCredenti = _options.withCredentials,
9259 withCredentials = _options$withCredenti === void 0 ? false : _options$withCredenti,
9260 _options$handleManife = _options.handleManifestRedirects,
9261 handleManifestRedirects = _options$handleManife === void 0 ? false : _options$handleManife;
9262 _this.vhs_ = vhs;
9263 _this.withCredentials = withCredentials;
9264 _this.handleManifestRedirects = handleManifestRedirects;
9265
9266 if (!srcUrlOrPlaylist) {
9267 throw new Error('A non-empty playlist URL or object is required');
9268 } // event naming?
9269
9270
9271 _this.on('minimumUpdatePeriod', function () {
9272 _this.refreshXml_();
9273 }); // live playlist staleness timeout
9274
9275
9276 _this.on('mediaupdatetimeout', function () {
9277 _this.refreshMedia_(_this.media().id);
9278 });
9279
9280 _this.state = 'HAVE_NOTHING';
9281 _this.loadedPlaylists_ = {};
9282 _this.logger_ = logger('DashPlaylistLoader'); // initialize the loader state
9283 // The masterPlaylistLoader will be created with a string
9284
9285 if (_this.isMaster_) {
9286 _this.masterPlaylistLoader_.srcUrl = srcUrlOrPlaylist; // TODO: reset sidxMapping between period changes
9287 // once multi-period is refactored
9288
9289 _this.masterPlaylistLoader_.sidxMapping_ = {};
9290 } else {
9291 _this.childPlaylist_ = srcUrlOrPlaylist;
9292 }
9293
9294 return _this;
9295 }
9296
9297 var _proto = DashPlaylistLoader.prototype;
9298
9299 _proto.requestErrored_ = function requestErrored_(err, request, startingState) {
9300 // disposed
9301 if (!this.request) {
9302 return true;
9303 } // pending request is cleared
9304
9305
9306 this.request = null;
9307
9308 if (err) {
9309 // use the provided error object or create one
9310 // based on the request/response
9311 this.error = typeof err === 'object' && !(err instanceof Error) ? err : {
9312 status: request.status,
9313 message: 'DASH request error at URL: ' + request.uri,
9314 response: request.response,
9315 // MEDIA_ERR_NETWORK
9316 code: 2
9317 };
9318
9319 if (startingState) {
9320 this.state = startingState;
9321 }
9322
9323 this.trigger('error');
9324 return true;
9325 }
9326 }
9327 /**
9328 * Verify that the container of the sidx segment can be parsed
9329 * and if it can, get and parse that segment.
9330 */
9331 ;
9332
9333 _proto.addSidxSegments_ = function addSidxSegments_(playlist, startingState, cb) {
9334 var _this2 = this;
9335
9336 var sidxKey = playlist.sidx && generateSidxKey(playlist.sidx); // playlist lacks sidx or sidx segments were added to this playlist already.
9337
9338 if (!playlist.sidx || !sidxKey || this.masterPlaylistLoader_.sidxMapping_[sidxKey]) {
9339 // keep this function async
9340 this.mediaRequest_ = window.setTimeout(function () {
9341 return cb(false);
9342 }, 0);
9343 return;
9344 } // resolve the segment URL relative to the playlist
9345
9346
9347 var uri = resolveManifestRedirect(this.handleManifestRedirects, playlist.sidx.resolvedUri);
9348
9349 var fin = function fin(err, request) {
9350 if (_this2.requestErrored_(err, request, startingState)) {
9351 return;
9352 }
9353
9354 var sidxMapping = _this2.masterPlaylistLoader_.sidxMapping_;
9355 var sidx;
9356
9357 try {
9358 sidx = parseSidx_1(toUint8(request.response).subarray(8));
9359 } catch (e) {
9360 // sidx parsing failed.
9361 _this2.requestErrored_(e, request, startingState);
9362
9363 return;
9364 }
9365
9366 sidxMapping[sidxKey] = {
9367 sidxInfo: playlist.sidx,
9368 sidx: sidx
9369 };
9370 addSidxSegmentsToPlaylist$1(playlist, sidx, playlist.sidx.resolvedUri);
9371 return cb(true);
9372 };
9373
9374 this.request = containerRequest(uri, this.vhs_.xhr, function (err, request, container, bytes) {
9375 if (err) {
9376 return fin(err, request);
9377 }
9378
9379 if (!container || container !== 'mp4') {
9380 return fin({
9381 status: request.status,
9382 message: "Unsupported " + (container || 'unknown') + " container type for sidx segment at URL: " + uri,
9383 // response is just bytes in this case
9384 // but we really don't want to return that.
9385 response: '',
9386 playlist: playlist,
9387 internal: true,
9388 blacklistDuration: Infinity,
9389 // MEDIA_ERR_NETWORK
9390 code: 2
9391 }, request);
9392 } // if we already downloaded the sidx bytes in the container request, use them
9393
9394
9395 var _playlist$sidx$bytera = playlist.sidx.byterange,
9396 offset = _playlist$sidx$bytera.offset,
9397 length = _playlist$sidx$bytera.length;
9398
9399 if (bytes.length >= length + offset) {
9400 return fin(err, {
9401 response: bytes.subarray(offset, offset + length),
9402 status: request.status,
9403 uri: request.uri
9404 });
9405 } // otherwise request sidx bytes
9406
9407
9408 _this2.request = _this2.vhs_.xhr({
9409 uri: uri,
9410 responseType: 'arraybuffer',
9411 headers: segmentXhrHeaders({
9412 byterange: playlist.sidx.byterange
9413 })
9414 }, fin);
9415 });
9416 };
9417
9418 _proto.dispose = function dispose() {
9419 this.trigger('dispose');
9420 this.stopRequest();
9421 this.loadedPlaylists_ = {};
9422 window.clearTimeout(this.minimumUpdatePeriodTimeout_);
9423 window.clearTimeout(this.mediaRequest_);
9424 window.clearTimeout(this.mediaUpdateTimeout);
9425 this.mediaUpdateTimeout = null;
9426 this.mediaRequest_ = null;
9427 this.minimumUpdatePeriodTimeout_ = null;
9428
9429 if (this.masterPlaylistLoader_.createMupOnMedia_) {
9430 this.off('loadedmetadata', this.masterPlaylistLoader_.createMupOnMedia_);
9431 this.masterPlaylistLoader_.createMupOnMedia_ = null;
9432 }
9433
9434 this.off();
9435 };
9436
9437 _proto.hasPendingRequest = function hasPendingRequest() {
9438 return this.request || this.mediaRequest_;
9439 };
9440
9441 _proto.stopRequest = function stopRequest() {
9442 if (this.request) {
9443 var oldRequest = this.request;
9444 this.request = null;
9445 oldRequest.onreadystatechange = null;
9446 oldRequest.abort();
9447 }
9448 };
9449
9450 _proto.media = function media(playlist) {
9451 var _this3 = this;
9452
9453 // getter
9454 if (!playlist) {
9455 return this.media_;
9456 } // setter
9457
9458
9459 if (this.state === 'HAVE_NOTHING') {
9460 throw new Error('Cannot switch media playlist from ' + this.state);
9461 }
9462
9463 var startingState = this.state; // find the playlist object if the target playlist has been specified by URI
9464
9465 if (typeof playlist === 'string') {
9466 if (!this.masterPlaylistLoader_.master.playlists[playlist]) {
9467 throw new Error('Unknown playlist URI: ' + playlist);
9468 }
9469
9470 playlist = this.masterPlaylistLoader_.master.playlists[playlist];
9471 }
9472
9473 var mediaChange = !this.media_ || playlist.id !== this.media_.id; // switch to previously loaded playlists immediately
9474
9475 if (mediaChange && this.loadedPlaylists_[playlist.id] && this.loadedPlaylists_[playlist.id].endList) {
9476 this.state = 'HAVE_METADATA';
9477 this.media_ = playlist; // trigger media change if the active media has been updated
9478
9479 if (mediaChange) {
9480 this.trigger('mediachanging');
9481 this.trigger('mediachange');
9482 }
9483
9484 return;
9485 } // switching to the active playlist is a no-op
9486
9487
9488 if (!mediaChange) {
9489 return;
9490 } // switching from an already loaded playlist
9491
9492
9493 if (this.media_) {
9494 this.trigger('mediachanging');
9495 }
9496
9497 this.addSidxSegments_(playlist, startingState, function (sidxChanged) {
9498 // everything is ready just continue to haveMetadata
9499 _this3.haveMetadata({
9500 startingState: startingState,
9501 playlist: playlist
9502 });
9503 });
9504 };
9505
9506 _proto.haveMetadata = function haveMetadata(_ref2) {
9507 var startingState = _ref2.startingState,
9508 playlist = _ref2.playlist;
9509 this.state = 'HAVE_METADATA';
9510 this.loadedPlaylists_[playlist.id] = playlist;
9511 this.mediaRequest_ = null; // This will trigger loadedplaylist
9512
9513 this.refreshMedia_(playlist.id); // fire loadedmetadata the first time a media playlist is loaded
9514 // to resolve setup of media groups
9515
9516 if (startingState === 'HAVE_MASTER') {
9517 this.trigger('loadedmetadata');
9518 } else {
9519 // trigger media change if the active media has been updated
9520 this.trigger('mediachange');
9521 }
9522 };
9523
9524 _proto.pause = function pause() {
9525 if (this.masterPlaylistLoader_.createMupOnMedia_) {
9526 this.off('loadedmetadata', this.masterPlaylistLoader_.createMupOnMedia_);
9527 this.masterPlaylistLoader_.createMupOnMedia_ = null;
9528 }
9529
9530 this.stopRequest();
9531 window.clearTimeout(this.mediaUpdateTimeout);
9532 this.mediaUpdateTimeout = null;
9533
9534 if (this.isMaster_) {
9535 window.clearTimeout(this.masterPlaylistLoader_.minimumUpdatePeriodTimeout_);
9536 this.masterPlaylistLoader_.minimumUpdatePeriodTimeout_ = null;
9537 }
9538
9539 if (this.state === 'HAVE_NOTHING') {
9540 // If we pause the loader before any data has been retrieved, it's as if we never
9541 // started, so reset to an unstarted state.
9542 this.started = false;
9543 }
9544 };
9545
9546 _proto.load = function load(isFinalRendition) {
9547 var _this4 = this;
9548
9549 window.clearTimeout(this.mediaUpdateTimeout);
9550 this.mediaUpdateTimeout = null;
9551 var media = this.media();
9552
9553 if (isFinalRendition) {
9554 var delay = media ? media.targetDuration / 2 * 1000 : 5 * 1000;
9555 this.mediaUpdateTimeout = window.setTimeout(function () {
9556 return _this4.load();
9557 }, delay);
9558 return;
9559 } // because the playlists are internal to the manifest, load should either load the
9560 // main manifest, or do nothing but trigger an event
9561
9562
9563 if (!this.started) {
9564 this.start();
9565 return;
9566 }
9567
9568 if (media && !media.endList) {
9569 // Check to see if this is the master loader and the MUP was cleared (this happens
9570 // when the loader was paused). `media` should be set at this point since one is always
9571 // set during `start()`.
9572 if (this.isMaster_ && !this.minimumUpdatePeriodTimeout_) {
9573 // Trigger minimumUpdatePeriod to refresh the master manifest
9574 this.trigger('minimumUpdatePeriod'); // Since there was no prior minimumUpdatePeriodTimeout it should be recreated
9575
9576 this.updateMinimumUpdatePeriodTimeout_();
9577 }
9578
9579 this.trigger('mediaupdatetimeout');
9580 } else {
9581 this.trigger('loadedplaylist');
9582 }
9583 };
9584
9585 _proto.start = function start() {
9586 var _this5 = this;
9587
9588 this.started = true; // We don't need to request the master manifest again
9589 // Call this asynchronously to match the xhr request behavior below
9590
9591 if (!this.isMaster_) {
9592 this.mediaRequest_ = window.setTimeout(function () {
9593 return _this5.haveMaster_();
9594 }, 0);
9595 return;
9596 }
9597
9598 this.requestMaster_(function (req, masterChanged) {
9599 _this5.haveMaster_();
9600
9601 if (!_this5.hasPendingRequest() && !_this5.media_) {
9602 _this5.media(_this5.masterPlaylistLoader_.master.playlists[0]);
9603 }
9604 });
9605 };
9606
9607 _proto.requestMaster_ = function requestMaster_(cb) {
9608 var _this6 = this;
9609
9610 this.request = this.vhs_.xhr({
9611 uri: this.masterPlaylistLoader_.srcUrl,
9612 withCredentials: this.withCredentials
9613 }, function (error, req) {
9614 if (_this6.requestErrored_(error, req)) {
9615 if (_this6.state === 'HAVE_NOTHING') {
9616 _this6.started = false;
9617 }
9618
9619 return;
9620 }
9621
9622 var masterChanged = req.responseText !== _this6.masterPlaylistLoader_.masterXml_;
9623 _this6.masterPlaylistLoader_.masterXml_ = req.responseText;
9624
9625 if (req.responseHeaders && req.responseHeaders.date) {
9626 _this6.masterLoaded_ = Date.parse(req.responseHeaders.date);
9627 } else {
9628 _this6.masterLoaded_ = Date.now();
9629 }
9630
9631 _this6.masterPlaylistLoader_.srcUrl = resolveManifestRedirect(_this6.handleManifestRedirects, _this6.masterPlaylistLoader_.srcUrl, req);
9632
9633 if (masterChanged) {
9634 _this6.handleMaster_();
9635
9636 _this6.syncClientServerClock_(function () {
9637 return cb(req, masterChanged);
9638 });
9639
9640 return;
9641 }
9642
9643 return cb(req, masterChanged);
9644 });
9645 }
9646 /**
9647 * Parses the master xml for UTCTiming node to sync the client clock to the server
9648 * clock. If the UTCTiming node requires a HEAD or GET request, that request is made.
9649 *
9650 * @param {Function} done
9651 * Function to call when clock sync has completed
9652 */
9653 ;
9654
9655 _proto.syncClientServerClock_ = function syncClientServerClock_(done) {
9656 var _this7 = this;
9657
9658 var utcTiming = parseUTCTiming(this.masterPlaylistLoader_.masterXml_); // No UTCTiming element found in the mpd. Use Date header from mpd request as the
9659 // server clock
9660
9661 if (utcTiming === null) {
9662 this.masterPlaylistLoader_.clientOffset_ = this.masterLoaded_ - Date.now();
9663 return done();
9664 }
9665
9666 if (utcTiming.method === 'DIRECT') {
9667 this.masterPlaylistLoader_.clientOffset_ = utcTiming.value - Date.now();
9668 return done();
9669 }
9670
9671 this.request = this.vhs_.xhr({
9672 uri: resolveUrl(this.masterPlaylistLoader_.srcUrl, utcTiming.value),
9673 method: utcTiming.method,
9674 withCredentials: this.withCredentials
9675 }, function (error, req) {
9676 // disposed
9677 if (!_this7.request) {
9678 return;
9679 }
9680
9681 if (error) {
9682 // sync request failed, fall back to using date header from mpd
9683 // TODO: log warning
9684 _this7.masterPlaylistLoader_.clientOffset_ = _this7.masterLoaded_ - Date.now();
9685 return done();
9686 }
9687
9688 var serverTime;
9689
9690 if (utcTiming.method === 'HEAD') {
9691 if (!req.responseHeaders || !req.responseHeaders.date) {
9692 // expected date header not present, fall back to the date header from the mpd request
9693 // TODO: log warning
9694 serverTime = _this7.masterLoaded_;
9695 } else {
9696 serverTime = Date.parse(req.responseHeaders.date);
9697 }
9698 } else {
9699 serverTime = Date.parse(req.responseText);
9700 }
9701
9702 _this7.masterPlaylistLoader_.clientOffset_ = serverTime - Date.now();
9703 done();
9704 });
9705 };
9706
9707 _proto.haveMaster_ = function haveMaster_() {
9708 this.state = 'HAVE_MASTER';
9709
9710 if (this.isMaster_) {
9711 // We have the master playlist at this point, so
9712 // trigger this to allow MasterPlaylistController
9713 // to make an initial playlist selection
9714 this.trigger('loadedplaylist');
9715 } else if (!this.media_) {
9716 // no media playlist was specifically selected so select
9717 // the one the child playlist loader was created with
9718 this.media(this.childPlaylist_);
9719 }
9720 };
9721
9722 _proto.handleMaster_ = function handleMaster_() {
9723 // clear media request
9724 this.mediaRequest_ = null;
9725 var oldMaster = this.masterPlaylistLoader_.master;
9726 var newMaster = parseMasterXml({
9727 masterXml: this.masterPlaylistLoader_.masterXml_,
9728 srcUrl: this.masterPlaylistLoader_.srcUrl,
9729 clientOffset: this.masterPlaylistLoader_.clientOffset_,
9730 sidxMapping: this.masterPlaylistLoader_.sidxMapping_,
9731 previousManifest: oldMaster
9732 }); // if we have an old master to compare the new master against
9733
9734 if (oldMaster) {
9735 newMaster = updateMaster(oldMaster, newMaster, this.masterPlaylistLoader_.sidxMapping_);
9736 } // only update master if we have a new master
9737
9738
9739 this.masterPlaylistLoader_.master = newMaster ? newMaster : oldMaster;
9740 var location = this.masterPlaylistLoader_.master.locations && this.masterPlaylistLoader_.master.locations[0];
9741
9742 if (location && location !== this.masterPlaylistLoader_.srcUrl) {
9743 this.masterPlaylistLoader_.srcUrl = location;
9744 }
9745
9746 if (!oldMaster || newMaster && newMaster.minimumUpdatePeriod !== oldMaster.minimumUpdatePeriod) {
9747 this.updateMinimumUpdatePeriodTimeout_();
9748 }
9749
9750 return Boolean(newMaster);
9751 };
9752
9753 _proto.updateMinimumUpdatePeriodTimeout_ = function updateMinimumUpdatePeriodTimeout_() {
9754 var mpl = this.masterPlaylistLoader_; // cancel any pending creation of mup on media
9755 // a new one will be added if needed.
9756
9757 if (mpl.createMupOnMedia_) {
9758 mpl.off('loadedmetadata', mpl.createMupOnMedia_);
9759 mpl.createMupOnMedia_ = null;
9760 } // clear any pending timeouts
9761
9762
9763 if (mpl.minimumUpdatePeriodTimeout_) {
9764 window.clearTimeout(mpl.minimumUpdatePeriodTimeout_);
9765 mpl.minimumUpdatePeriodTimeout_ = null;
9766 }
9767
9768 var mup = mpl.master && mpl.master.minimumUpdatePeriod; // If the minimumUpdatePeriod has a value of 0, that indicates that the current
9769 // MPD has no future validity, so a new one will need to be acquired when new
9770 // media segments are to be made available. Thus, we use the target duration
9771 // in this case
9772
9773 if (mup === 0) {
9774 if (mpl.media()) {
9775 mup = mpl.media().targetDuration * 1000;
9776 } else {
9777 mpl.createMupOnMedia_ = mpl.updateMinimumUpdatePeriodTimeout_;
9778 mpl.one('loadedmetadata', mpl.createMupOnMedia_);
9779 }
9780 } // if minimumUpdatePeriod is invalid or <= zero, which
9781 // can happen when a live video becomes VOD, skip timeout
9782 // creation.
9783
9784
9785 if (typeof mup !== 'number' || mup <= 0) {
9786 if (mup < 0) {
9787 this.logger_("found invalid minimumUpdatePeriod of " + mup + ", not setting a timeout");
9788 }
9789
9790 return;
9791 }
9792
9793 this.createMUPTimeout_(mup);
9794 };
9795
9796 _proto.createMUPTimeout_ = function createMUPTimeout_(mup) {
9797 var mpl = this.masterPlaylistLoader_;
9798 mpl.minimumUpdatePeriodTimeout_ = window.setTimeout(function () {
9799 mpl.minimumUpdatePeriodTimeout_ = null;
9800 mpl.trigger('minimumUpdatePeriod');
9801 mpl.createMUPTimeout_(mup);
9802 }, mup);
9803 }
9804 /**
9805 * Sends request to refresh the master xml and updates the parsed master manifest
9806 */
9807 ;
9808
9809 _proto.refreshXml_ = function refreshXml_() {
9810 var _this8 = this;
9811
9812 this.requestMaster_(function (req, masterChanged) {
9813 if (!masterChanged) {
9814 return;
9815 }
9816
9817 if (_this8.media_) {
9818 _this8.media_ = _this8.masterPlaylistLoader_.master.playlists[_this8.media_.id];
9819 } // This will filter out updated sidx info from the mapping
9820
9821
9822 _this8.masterPlaylistLoader_.sidxMapping_ = filterChangedSidxMappings(_this8.masterPlaylistLoader_.master, _this8.masterPlaylistLoader_.sidxMapping_);
9823
9824 _this8.addSidxSegments_(_this8.media(), _this8.state, function (sidxChanged) {
9825 // TODO: do we need to reload the current playlist?
9826 _this8.refreshMedia_(_this8.media().id);
9827 });
9828 });
9829 }
9830 /**
9831 * Refreshes the media playlist by re-parsing the master xml and updating playlist
9832 * references. If this is an alternate loader, the updated parsed manifest is retrieved
9833 * from the master loader.
9834 */
9835 ;
9836
9837 _proto.refreshMedia_ = function refreshMedia_(mediaID) {
9838 var _this9 = this;
9839
9840 if (!mediaID) {
9841 throw new Error('refreshMedia_ must take a media id');
9842 } // for master we have to reparse the master xml
9843 // to re-create segments based on current timing values
9844 // which may change media. We only skip updating master
9845 // if this is the first time this.media_ is being set,
9846 // as master was just parsed in that case.
9847
9848
9849 if (this.media_ && this.isMaster_) {
9850 this.handleMaster_();
9851 }
9852
9853 var playlists = this.masterPlaylistLoader_.master.playlists;
9854 var mediaChanged = !this.media_ || this.media_ !== playlists[mediaID];
9855
9856 if (mediaChanged) {
9857 this.media_ = playlists[mediaID];
9858 } else {
9859 this.trigger('playlistunchanged');
9860 }
9861
9862 if (!this.mediaUpdateTimeout) {
9863 var createMediaUpdateTimeout = function createMediaUpdateTimeout() {
9864 if (_this9.media().endList) {
9865 return;
9866 }
9867
9868 _this9.mediaUpdateTimeout = window.setTimeout(function () {
9869 _this9.trigger('mediaupdatetimeout');
9870
9871 createMediaUpdateTimeout();
9872 }, refreshDelay(_this9.media(), Boolean(mediaChanged)));
9873 };
9874
9875 createMediaUpdateTimeout();
9876 }
9877
9878 this.trigger('loadedplaylist');
9879 };
9880
9881 return DashPlaylistLoader;
9882 }(EventTarget);
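/**
 * A minimal usage sketch (srcUrl and vhs are hypothetical stand-ins for
 * the values the playlist controller normally supplies):
 *
 *   var loader = new DashPlaylistLoader(srcUrl, vhs, { withCredentials: false });
 *   loader.on('loadedplaylist', function () {
 *     var master = loader.master; // the parsed mpd manifest
 *   });
 *   loader.load(); // kicks off the initial mpd request via start()
 */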
9883
9884 var Config = {
9885 GOAL_BUFFER_LENGTH: 30,
9886 MAX_GOAL_BUFFER_LENGTH: 60,
9887 BACK_BUFFER_LENGTH: 30,
9888 GOAL_BUFFER_LENGTH_RATE: 1,
9889 // 0.5 MB/s
9890 INITIAL_BANDWIDTH: 4194304,
9891 // A fudge factor to apply to advertised playlist bitrates to account for
9892 // temporary fluctuations in client bandwidth
9893 BANDWIDTH_VARIANCE: 1.2,
9894 // How much of the buffer must be filled before we consider upswitching
9895 BUFFER_LOW_WATER_LINE: 0,
9896 MAX_BUFFER_LOW_WATER_LINE: 30,
9897 // TODO: Remove this when experimentalBufferBasedABR is removed
9898 EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE: 16,
9899 BUFFER_LOW_WATER_LINE_RATE: 1,
9900 // If the buffer is greater than the high water line, we won't switch down
9901 BUFFER_HIGH_WATER_LINE: 30
9902 };
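/**
 * These defaults are read throughout the bundle; a sketch of tuning one
 * of them at runtime (assuming the value is changed before playback
 * begins):
 *
 *   Config.GOAL_BUFFER_LENGTH = 45; // aim to keep 45s buffered ahead
 */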
9903
9904 var stringToArrayBuffer = function stringToArrayBuffer(string) {
9905 var view = new Uint8Array(new ArrayBuffer(string.length));
9906
9907 for (var i = 0; i < string.length; i++) {
9908 view[i] = string.charCodeAt(i);
9909 }
9910
9911 return view.buffer;
9912 };
9913
9914 /* global Blob, BlobBuilder, Worker */
9915 // unify worker interface
9916 var browserWorkerPolyFill = function browserWorkerPolyFill(workerObj) {
9917 // node only supports on/off
9918 workerObj.on = workerObj.addEventListener;
9919 workerObj.off = workerObj.removeEventListener;
9920 return workerObj;
9921 };
9922
9923 var createObjectURL = function createObjectURL(str) {
9924 try {
9925 return URL.createObjectURL(new Blob([str], {
9926 type: 'application/javascript'
9927 }));
9928 } catch (e) {
9929 var blob = new BlobBuilder();
9930 blob.append(str);
9931 return URL.createObjectURL(blob.getBlob());
9932 }
9933 };
9934
9935 var factory = function factory(code) {
9936 return function () {
9937 var objectUrl = createObjectURL(code);
9938 var worker = browserWorkerPolyFill(new Worker(objectUrl));
9939 worker.objURL = objectUrl;
9940 var terminate = worker.terminate;
9941 worker.on = worker.addEventListener;
9942 worker.off = worker.removeEventListener;
9943
9944 worker.terminate = function () {
9945 URL.revokeObjectURL(objectUrl);
9946 return terminate.call(this);
9947 };
9948
9949 return worker;
9950 };
9951 };
9952 var transform = function transform(code) {
9953 return "var browserWorkerPolyFill = " + browserWorkerPolyFill.toString() + ";\n" + 'browserWorkerPolyFill(self);\n' + code;
9954 };
9955
9956 var getWorkerString = function getWorkerString(fn) {
9957 return fn.toString().replace(/^function.+?{/, '').slice(0, -1);
9958 };
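/**
 * These three helpers combine as shown below, which is the pattern used
 * for the transmuxer worker that follows:
 *
 *   var createWorker = factory(transform(getWorkerString(function () {
 *     // worker body; runs inside the Worker created from a Blob URL
 *   })));
 *   var worker = createWorker(); // worker.terminate() also revokes the URL
 */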
9959
9960 /* rollup-plugin-worker-factory start for worker!/Users/ddashkevich/projects/http-streaming/src/transmuxer-worker.js */
9961 var workerCode$1 = transform(getWorkerString(function () {
9962 /**
9963 * mux.js
9964 *
9965 * Copyright (c) Brightcove
9966 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
9967 *
9968 * A lightweight readable stream implementation that handles event dispatching.
9969 * Objects that inherit from streams should call init in their constructors.
9970 */
9971
9972 var Stream = function Stream() {
9973 this.init = function () {
9974 var listeners = {};
9975 /**
9976 * Add a listener for a specified event type.
9977 * @param type {string} the event name
9978 * @param listener {function} the callback to be invoked when an event of
9979 * the specified type occurs
9980 */
9981
9982 this.on = function (type, listener) {
9983 if (!listeners[type]) {
9984 listeners[type] = [];
9985 }
9986
9987 listeners[type] = listeners[type].concat(listener);
9988 };
9989 /**
9990 * Remove a listener for a specified event type.
9991 * @param type {string} the event name
9992 * @param listener {function} a function previously registered for this
9993 * type of event through `on`
9994 */
9995
9996
9997 this.off = function (type, listener) {
9998 var index;
9999
10000 if (!listeners[type]) {
10001 return false;
10002 }
10003
10004 index = listeners[type].indexOf(listener);
10005 listeners[type] = listeners[type].slice();
10006 listeners[type].splice(index, 1);
10007 return index > -1;
10008 };
10009 /**
10010 * Trigger an event of the specified type on this stream. Any additional
10011 * arguments to this function are passed as parameters to event listeners.
10012 * @param type {string} the event name
10013 */
10014
10015
10016 this.trigger = function (type) {
10017 var callbacks, i, length, args;
10018 callbacks = listeners[type];
10019
10020 if (!callbacks) {
10021 return;
10022 } // Slicing the arguments on every invocation of this method
10023 // can add a significant amount of overhead. Avoid the
10024 // intermediate object creation for the common case of a
10025 // single callback argument
10026
10027
10028 if (arguments.length === 2) {
10029 length = callbacks.length;
10030
10031 for (i = 0; i < length; ++i) {
10032 callbacks[i].call(this, arguments[1]);
10033 }
10034 } else {
10035 args = [];
10036 i = arguments.length;
10037
10038 for (i = 1; i < arguments.length; ++i) {
10039 args.push(arguments[i]);
10040 }
10041
10042 length = callbacks.length;
10043
10044 for (i = 0; i < length; ++i) {
10045 callbacks[i].apply(this, args);
10046 }
10047 }
10048 };
10049 /**
10050 * Destroys the stream and cleans up.
10051 */
10052
10053
10054 this.dispose = function () {
10055 listeners = {};
10056 };
10057 };
10058 };
10059 /**
10060 * Forwards all `data` events on this stream to the destination stream. The
10061 * destination stream should provide a method `push` to receive the data
10062 * events as they arrive.
10063 * @param destination {stream} the stream that will receive all `data` events
10064 * @param autoFlush {boolean} if false, we will not call `flush` on the destination
10065 * when the current stream emits a 'done' event
10066 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
10067 */
10068
10069
10070 Stream.prototype.pipe = function (destination) {
10071 this.on('data', function (data) {
10072 destination.push(data);
10073 });
10074 this.on('done', function (flushSource) {
10075 destination.flush(flushSource);
10076 });
10077 this.on('partialdone', function (flushSource) {
10078 destination.partialFlush(flushSource);
10079 });
10080 this.on('endedtimeline', function (flushSource) {
10081 destination.endTimeline(flushSource);
10082 });
10083 this.on('reset', function (flushSource) {
10084 destination.reset(flushSource);
10085 });
10086 return destination;
10087 }; // Default stream functions that are expected to be overridden to perform
10088 // actual work. These are provided by the prototype as a sort of no-op
10089 // implementation so that we don't have to check for their existence in the
10090 // `pipe` function above.
10091
10092
10093 Stream.prototype.push = function (data) {
10094 this.trigger('data', data);
10095 };
10096
10097 Stream.prototype.flush = function (flushSource) {
10098 this.trigger('done', flushSource);
10099 };
10100
10101 Stream.prototype.partialFlush = function (flushSource) {
10102 this.trigger('partialdone', flushSource);
10103 };
10104
10105 Stream.prototype.endTimeline = function (flushSource) {
10106 this.trigger('endedtimeline', flushSource);
10107 };
10108
10109 Stream.prototype.reset = function (flushSource) {
10110 this.trigger('reset', flushSource);
10111 };
10112
10113 var stream = Stream;
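/**
 * An illustrative sketch of the pattern the transmuxer streams below
 * follow: inherit from Stream, call init in the constructor, and emit
 * 'data'/'done' events that pipe() forwards.
 *
 *   var Doubler = function () {
 *     Doubler.prototype.init.call(this);
 *     this.push = function (n) {
 *       this.trigger('data', n * 2);
 *     };
 *   };
 *   Doubler.prototype = new Stream();
 *   // new Doubler() can now be pipe()d to any stream with push/flush
 */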
10114 var MAX_UINT32$1 = Math.pow(2, 32);
10115
10116 var getUint64$2 = function getUint64(uint8) {
10117 var dv = new DataView(uint8.buffer, uint8.byteOffset, uint8.byteLength);
10118 var value;
10119
10120 if (dv.getBigUint64) {
10121 value = dv.getBigUint64(0);
10122
10123 if (value < Number.MAX_SAFE_INTEGER) {
10124 return Number(value);
10125 }
10126
10127 return value;
10128 }
10129
10130 return dv.getUint32(0) * MAX_UINT32$1 + dv.getUint32(4);
10131 };
10132
10133 var numbers = {
10134 getUint64: getUint64$2,
10135 MAX_UINT32: MAX_UINT32$1
10136 };
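/**
 * For example, the big-endian bytes 00 00 00 00 00 00 00 05 decode to 5,
 * whether or not DataView.getBigUint64 is available:
 *
 *   getUint64$2(new Uint8Array([0, 0, 0, 0, 0, 0, 0, 5])); // 5
 */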
10137 var MAX_UINT32 = numbers.MAX_UINT32;
10138 var box, dinf, esds, ftyp, mdat, mfhd, minf, moof, moov, mvex, mvhd, trak, tkhd, mdia, mdhd, hdlr, sdtp, stbl, stsd, traf, trex, trun$1, types, MAJOR_BRAND, MINOR_VERSION, AVC1_BRAND, VIDEO_HDLR, AUDIO_HDLR, HDLR_TYPES, VMHD, SMHD, DREF, STCO, STSC, STSZ, STTS; // pre-calculate constants
10139
10140 (function () {
10141 var i;
10142 types = {
10143 avc1: [],
10144 // codingname
10145 avcC: [],
10146 btrt: [],
10147 dinf: [],
10148 dref: [],
10149 esds: [],
10150 ftyp: [],
10151 hdlr: [],
10152 mdat: [],
10153 mdhd: [],
10154 mdia: [],
10155 mfhd: [],
10156 minf: [],
10157 moof: [],
10158 moov: [],
10159 mp4a: [],
10160 // codingname
10161 mvex: [],
10162 mvhd: [],
10163 pasp: [],
10164 sdtp: [],
10165 smhd: [],
10166 stbl: [],
10167 stco: [],
10168 stsc: [],
10169 stsd: [],
10170 stsz: [],
10171 stts: [],
10172 styp: [],
10173 tfdt: [],
10174 tfhd: [],
10175 traf: [],
10176 trak: [],
10177 trun: [],
10178 trex: [],
10179 tkhd: [],
10180 vmhd: []
10181 }; // In environments where Uint8Array is undefined (e.g., IE8), skip set up so that we
10182 // don't throw an error
10183
10184 if (typeof Uint8Array === 'undefined') {
10185 return;
10186 }
10187
10188 for (i in types) {
10189 if (types.hasOwnProperty(i)) {
10190 types[i] = [i.charCodeAt(0), i.charCodeAt(1), i.charCodeAt(2), i.charCodeAt(3)];
10191 }
10192 }
10193
10194 MAJOR_BRAND = new Uint8Array(['i'.charCodeAt(0), 's'.charCodeAt(0), 'o'.charCodeAt(0), 'm'.charCodeAt(0)]);
10195 AVC1_BRAND = new Uint8Array(['a'.charCodeAt(0), 'v'.charCodeAt(0), 'c'.charCodeAt(0), '1'.charCodeAt(0)]);
10196 MINOR_VERSION = new Uint8Array([0, 0, 0, 1]);
10197 VIDEO_HDLR = new Uint8Array([0x00, // version 0
10198 0x00, 0x00, 0x00, // flags
10199 0x00, 0x00, 0x00, 0x00, // pre_defined
10200 0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'
10201 0x00, 0x00, 0x00, 0x00, // reserved
10202 0x00, 0x00, 0x00, 0x00, // reserved
10203 0x00, 0x00, 0x00, 0x00, // reserved
10204 0x56, 0x69, 0x64, 0x65, 0x6f, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
10205 ]);
10206 AUDIO_HDLR = new Uint8Array([0x00, // version 0
10207 0x00, 0x00, 0x00, // flags
10208 0x00, 0x00, 0x00, 0x00, // pre_defined
10209 0x73, 0x6f, 0x75, 0x6e, // handler_type: 'soun'
10210 0x00, 0x00, 0x00, 0x00, // reserved
10211 0x00, 0x00, 0x00, 0x00, // reserved
10212 0x00, 0x00, 0x00, 0x00, // reserved
10213 0x53, 0x6f, 0x75, 0x6e, 0x64, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler'
10214 ]);
10215 HDLR_TYPES = {
10216 video: VIDEO_HDLR,
10217 audio: AUDIO_HDLR
10218 };
10219 DREF = new Uint8Array([0x00, // version 0
10220 0x00, 0x00, 0x00, // flags
10221 0x00, 0x00, 0x00, 0x01, // entry_count
10222 0x00, 0x00, 0x00, 0x0c, // entry_size
10223 0x75, 0x72, 0x6c, 0x20, // 'url' type
10224 0x00, // version 0
10225 0x00, 0x00, 0x01 // entry_flags
10226 ]);
10227 SMHD = new Uint8Array([0x00, // version
10228 0x00, 0x00, 0x00, // flags
10229 0x00, 0x00, // balance, 0 means centered
10230 0x00, 0x00 // reserved
10231 ]);
10232 STCO = new Uint8Array([0x00, // version
10233 0x00, 0x00, 0x00, // flags
10234 0x00, 0x00, 0x00, 0x00 // entry_count
10235 ]);
10236 STSC = STCO;
10237 STSZ = new Uint8Array([0x00, // version
10238 0x00, 0x00, 0x00, // flags
10239 0x00, 0x00, 0x00, 0x00, // sample_size
10240 0x00, 0x00, 0x00, 0x00 // sample_count
10241 ]);
10242 STTS = STCO;
10243 VMHD = new Uint8Array([0x00, // version
10244 0x00, 0x00, 0x01, // flags
10245 0x00, 0x00, // graphicsmode
10246 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 // opcolor
10247 ]);
10248 })();
10249
10250 box = function box(type) {
10251 var payload = [],
10252 size = 0,
10253 i,
10254 result,
10255 view;
10256
10257 for (i = 1; i < arguments.length; i++) {
10258 payload.push(arguments[i]);
10259 }
10260
10261 i = payload.length; // calculate the total size we need to allocate
10262
10263 while (i--) {
10264 size += payload[i].byteLength;
10265 }
10266
10267 result = new Uint8Array(size + 8);
10268 view = new DataView(result.buffer, result.byteOffset, result.byteLength);
10269 view.setUint32(0, result.byteLength);
10270 result.set(type, 4); // copy the payload into the result
10271
10272 for (i = 0, size = 8; i < payload.length; i++) {
10273 result.set(payload[i], size);
10274 size += payload[i].byteLength;
10275 }
10276
10277 return result;
10278 };
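/**
 * Sketch: every box is its payload prefixed with a 32-bit size and a
 * four-character type, so
 *
 *   box(types.mdat, new Uint8Array([0x01]));
 *   // returns [0x00, 0x00, 0x00, 0x09, 0x6d, 0x64, 0x61, 0x74, 0x01]
 *   // i.e. size 9, type 'mdat', then the one payload byte
 */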
10279
10280 dinf = function dinf() {
10281 return box(types.dinf, box(types.dref, DREF));
10282 };
10283
10284 esds = function esds(track) {
10285 return box(types.esds, new Uint8Array([0x00, // version
10286 0x00, 0x00, 0x00, // flags
10287 // ES_Descriptor
10288 0x03, // tag, ES_DescrTag
10289 0x19, // length
10290 0x00, 0x00, // ES_ID
10291 0x00, // streamDependenceFlag, URL_flag, reserved, streamPriority
10292 // DecoderConfigDescriptor
10293 0x04, // tag, DecoderConfigDescrTag
10294 0x11, // length
10295 0x40, // object type
10296 0x15, // streamType
10297 0x00, 0x06, 0x00, // bufferSizeDB
10298 0x00, 0x00, 0xda, 0xc0, // maxBitrate
10299 0x00, 0x00, 0xda, 0xc0, // avgBitrate
10300 // DecoderSpecificInfo
10301 0x05, // tag, DecoderSpecificInfoTag
10302 0x02, // length
10303 // ISO/IEC 14496-3, AudioSpecificConfig
10304 // for samplingFrequencyIndex see ISO/IEC 13818-7:2006, 8.1.3.2.2, Table 35
10305 track.audioobjecttype << 3 | track.samplingfrequencyindex >>> 1, track.samplingfrequencyindex << 7 | track.channelcount << 3, 0x06, 0x01, 0x02 // GASpecificConfig
10306 ]));
10307 };
10308
10309 ftyp = function ftyp() {
10310 return box(types.ftyp, MAJOR_BRAND, MINOR_VERSION, MAJOR_BRAND, AVC1_BRAND);
10311 };
10312
10313 hdlr = function hdlr(type) {
10314 return box(types.hdlr, HDLR_TYPES[type]);
10315 };
10316
10317 mdat = function mdat(data) {
10318 return box(types.mdat, data);
10319 };
10320
10321 mdhd = function mdhd(track) {
10322 var result = new Uint8Array([0x00, // version 0
10323 0x00, 0x00, 0x00, // flags
10324 0x00, 0x00, 0x00, 0x02, // creation_time
10325 0x00, 0x00, 0x00, 0x03, // modification_time
10326 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
10327 track.duration >>> 24 & 0xFF, track.duration >>> 16 & 0xFF, track.duration >>> 8 & 0xFF, track.duration & 0xFF, // duration
10328 0x55, 0xc4, // 'und' language (undetermined)
10329 0x00, 0x00]); // Use the sample rate from the track metadata, when it is
10330 // defined. The sample rate can be parsed out of an ADTS header, for
10331 // instance.
10332
10333 if (track.samplerate) {
10334 result[12] = track.samplerate >>> 24 & 0xFF;
10335 result[13] = track.samplerate >>> 16 & 0xFF;
10336 result[14] = track.samplerate >>> 8 & 0xFF;
10337 result[15] = track.samplerate & 0xFF;
10338 }
10339
10340 return box(types.mdhd, result);
10341 };
10342
10343 mdia = function mdia(track) {
10344 return box(types.mdia, mdhd(track), hdlr(track.type), minf(track));
10345 };
10346
10347 mfhd = function mfhd(sequenceNumber) {
10348 return box(types.mfhd, new Uint8Array([0x00, 0x00, 0x00, 0x00, // flags
10349 (sequenceNumber & 0xFF000000) >> 24, (sequenceNumber & 0xFF0000) >> 16, (sequenceNumber & 0xFF00) >> 8, sequenceNumber & 0xFF // sequence_number
10350 ]));
10351 };
10352
10353 minf = function minf(track) {
10354 return box(types.minf, track.type === 'video' ? box(types.vmhd, VMHD) : box(types.smhd, SMHD), dinf(), stbl(track));
10355 };
10356
10357 moof = function moof(sequenceNumber, tracks) {
10358 var trackFragments = [],
10359 i = tracks.length; // build traf boxes for each track fragment
10360
10361 while (i--) {
10362 trackFragments[i] = traf(tracks[i]);
10363 }
10364
10365 return box.apply(null, [types.moof, mfhd(sequenceNumber)].concat(trackFragments));
10366 };
10367 /**
10368 * Returns a movie box.
10369 * @param tracks {array} the tracks associated with this movie
10370 * @see ISO/IEC 14496-12:2012(E), section 8.2.1
10371 */
10372
10373
10374 moov = function moov(tracks) {
10375 var i = tracks.length,
10376 boxes = [];
10377
10378 while (i--) {
10379 boxes[i] = trak(tracks[i]);
10380 }
10381
10382 return box.apply(null, [types.moov, mvhd(0xffffffff)].concat(boxes).concat(mvex(tracks)));
10383 };
10384
10385 mvex = function mvex(tracks) {
10386 var i = tracks.length,
10387 boxes = [];
10388
10389 while (i--) {
10390 boxes[i] = trex(tracks[i]);
10391 }
10392
10393 return box.apply(null, [types.mvex].concat(boxes));
10394 };
10395
10396 mvhd = function mvhd(duration) {
10397 var bytes = new Uint8Array([0x00, // version 0
10398 0x00, 0x00, 0x00, // flags
10399 0x00, 0x00, 0x00, 0x01, // creation_time
10400 0x00, 0x00, 0x00, 0x02, // modification_time
10401 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
10402 (duration & 0xFF000000) >> 24, (duration & 0xFF0000) >> 16, (duration & 0xFF00) >> 8, duration & 0xFF, // duration
10403 0x00, 0x01, 0x00, 0x00, // 1.0 rate
10404 0x01, 0x00, // 1.0 volume
10405 0x00, 0x00, // reserved
10406 0x00, 0x00, 0x00, 0x00, // reserved
10407 0x00, 0x00, 0x00, 0x00, // reserved
10408 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
10409 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
10410 0xff, 0xff, 0xff, 0xff // next_track_ID
10411 ]);
10412 return box(types.mvhd, bytes);
10413 };
10414
10415 sdtp = function sdtp(track) {
10416 var samples = track.samples || [],
10417 bytes = new Uint8Array(4 + samples.length),
10418 flags,
10419 i; // leave the full box header (4 bytes) all zero
10420 // write the sample table
10421
10422 for (i = 0; i < samples.length; i++) {
10423 flags = samples[i].flags;
10424 bytes[i + 4] = flags.dependsOn << 4 | flags.isDependedOn << 2 | flags.hasRedundancy;
10425 }
10426
10427 return box(types.sdtp, bytes);
10428 };
10429
10430 stbl = function stbl(track) {
10431 return box(types.stbl, stsd(track), box(types.stts, STTS), box(types.stsc, STSC), box(types.stsz, STSZ), box(types.stco, STCO));
10432 };
10433
10434 (function () {
10435 var videoSample, audioSample;
10436
10437 stsd = function stsd(track) {
10438 return box(types.stsd, new Uint8Array([0x00, // version 0
10439 0x00, 0x00, 0x00, // flags
10440 0x00, 0x00, 0x00, 0x01]), track.type === 'video' ? videoSample(track) : audioSample(track));
10441 };
10442
10443 videoSample = function videoSample(track) {
10444 var sps = track.sps || [],
10445 pps = track.pps || [],
10446 sequenceParameterSets = [],
10447 pictureParameterSets = [],
10448 i,
10449 avc1Box; // assemble the SPSs
10450
10451 for (i = 0; i < sps.length; i++) {
10452 sequenceParameterSets.push((sps[i].byteLength & 0xFF00) >>> 8);
10453 sequenceParameterSets.push(sps[i].byteLength & 0xFF); // sequenceParameterSetLength
10454
10455 sequenceParameterSets = sequenceParameterSets.concat(Array.prototype.slice.call(sps[i])); // SPS
10456 } // assemble the PPSs
10457
10458
10459 for (i = 0; i < pps.length; i++) {
10460 pictureParameterSets.push((pps[i].byteLength & 0xFF00) >>> 8);
10461 pictureParameterSets.push(pps[i].byteLength & 0xFF);
10462 pictureParameterSets = pictureParameterSets.concat(Array.prototype.slice.call(pps[i]));
10463 }
10464
10465 avc1Box = [types.avc1, new Uint8Array([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
10466 0x00, 0x01, // data_reference_index
10467 0x00, 0x00, // pre_defined
10468 0x00, 0x00, // reserved
10469 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
10470 (track.width & 0xff00) >> 8, track.width & 0xff, // width
10471 (track.height & 0xff00) >> 8, track.height & 0xff, // height
10472 0x00, 0x48, 0x00, 0x00, // horizresolution
10473 0x00, 0x48, 0x00, 0x00, // vertresolution
10474 0x00, 0x00, 0x00, 0x00, // reserved
10475 0x00, 0x01, // frame_count
10476 0x13, 0x76, 0x69, 0x64, 0x65, 0x6f, 0x6a, 0x73, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x69, 0x62, 0x2d, 0x68, 0x6c, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // compressorname
10477 0x00, 0x18, // depth = 24
10478 0x11, 0x11 // pre_defined = -1
10479 ]), box(types.avcC, new Uint8Array([0x01, // configurationVersion
10480 track.profileIdc, // AVCProfileIndication
10481 track.profileCompatibility, // profile_compatibility
10482 track.levelIdc, // AVCLevelIndication
10483 0xff // lengthSizeMinusOne, hard-coded to 4 bytes
10484 ].concat([sps.length], // numOfSequenceParameterSets
10485 sequenceParameterSets, // "SPS"
10486 [pps.length], // numOfPictureParameterSets
10487 pictureParameterSets // "PPS"
10488 ))), box(types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80, // bufferSizeDB
10489 0x00, 0x2d, 0xc6, 0xc0, // maxBitrate
10490 0x00, 0x2d, 0xc6, 0xc0 // avgBitrate
10491 ]))];
10492
10493 if (track.sarRatio) {
10494 var hSpacing = track.sarRatio[0],
10495 vSpacing = track.sarRatio[1];
10496 avc1Box.push(box(types.pasp, new Uint8Array([(hSpacing & 0xFF000000) >> 24, (hSpacing & 0xFF0000) >> 16, (hSpacing & 0xFF00) >> 8, hSpacing & 0xFF, (vSpacing & 0xFF000000) >> 24, (vSpacing & 0xFF0000) >> 16, (vSpacing & 0xFF00) >> 8, vSpacing & 0xFF])));
10497 }
10498
10499 return box.apply(null, avc1Box);
10500 };
10501
10502 audioSample = function audioSample(track) {
10503 return box(types.mp4a, new Uint8Array([// SampleEntry, ISO/IEC 14496-12
10504 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
10505 0x00, 0x01, // data_reference_index
10506 // AudioSampleEntry, ISO/IEC 14496-12
10507 0x00, 0x00, 0x00, 0x00, // reserved
10508 0x00, 0x00, 0x00, 0x00, // reserved
10509 (track.channelcount & 0xff00) >> 8, track.channelcount & 0xff, // channelcount
10510 (track.samplesize & 0xff00) >> 8, track.samplesize & 0xff, // samplesize
10511 0x00, 0x00, // pre_defined
10512 0x00, 0x00, // reserved
10513 (track.samplerate & 0xff00) >> 8, track.samplerate & 0xff, 0x00, 0x00 // samplerate, 16.16
10514 // MP4AudioSampleEntry, ISO/IEC 14496-14
10515 ]), esds(track));
10516 };
 10517 })(); // end stsd sample-entry helpers
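  /*
   * Illustrative sketch (assumed field shapes, not part of the original
   * bundle): the minimal track fields videoSample() reads when building an
   * stsd entry. The SPS/PPS payloads below are placeholders, not valid
   * parameter sets.
   *
   *   var videoTrack = {
   *     type: 'video', width: 640, height: 360,
   *     profileIdc: 66, profileCompatibility: 192, levelIdc: 30,
   *     sps: [new Uint8Array([0x67])], pps: [new Uint8Array([0x68])],
   *     sarRatio: [1, 1] // optional; adds a pasp box when present
   *   };
   */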
10518
10519 tkhd = function tkhd(track) {
10520 var result = new Uint8Array([0x00, // version 0
10521 0x00, 0x00, 0x07, // flags
10522 0x00, 0x00, 0x00, 0x00, // creation_time
10523 0x00, 0x00, 0x00, 0x00, // modification_time
10524 (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
10525 0x00, 0x00, 0x00, 0x00, // reserved
10526 (track.duration & 0xFF000000) >> 24, (track.duration & 0xFF0000) >> 16, (track.duration & 0xFF00) >> 8, track.duration & 0xFF, // duration
10527 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
10528 0x00, 0x00, // layer
10529 0x00, 0x00, // alternate_group
10530 0x01, 0x00, // non-audio track volume
10531 0x00, 0x00, // reserved
10532 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
10533 (track.width & 0xFF00) >> 8, track.width & 0xFF, 0x00, 0x00, // width
10534 (track.height & 0xFF00) >> 8, track.height & 0xFF, 0x00, 0x00 // height
10535 ]);
10536 return box(types.tkhd, result);
10537 };
10538 /**
10539 * Generate a track fragment (traf) box. A traf box collects metadata
10540 * about tracks in a movie fragment (moof) box.
10541 */
10542
10543
10544 traf = function traf(track) {
10545 var trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable, dataOffset, upperWordBaseMediaDecodeTime, lowerWordBaseMediaDecodeTime;
10546 trackFragmentHeader = box(types.tfhd, new Uint8Array([0x00, // version 0
10547 0x00, 0x00, 0x3a, // flags
10548 (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
10549 0x00, 0x00, 0x00, 0x01, // sample_description_index
10550 0x00, 0x00, 0x00, 0x00, // default_sample_duration
10551 0x00, 0x00, 0x00, 0x00, // default_sample_size
10552 0x00, 0x00, 0x00, 0x00 // default_sample_flags
10553 ]));
10554 upperWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime / MAX_UINT32);
10555 lowerWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime % MAX_UINT32);
10556 trackFragmentDecodeTime = box(types.tfdt, new Uint8Array([0x01, // version 1
10557 0x00, 0x00, 0x00, // flags
10558 // baseMediaDecodeTime
10559 upperWordBaseMediaDecodeTime >>> 24 & 0xFF, upperWordBaseMediaDecodeTime >>> 16 & 0xFF, upperWordBaseMediaDecodeTime >>> 8 & 0xFF, upperWordBaseMediaDecodeTime & 0xFF, lowerWordBaseMediaDecodeTime >>> 24 & 0xFF, lowerWordBaseMediaDecodeTime >>> 16 & 0xFF, lowerWordBaseMediaDecodeTime >>> 8 & 0xFF, lowerWordBaseMediaDecodeTime & 0xFF])); // the data offset specifies the number of bytes from the start of
10560 // the containing moof to the first payload byte of the associated
10561 // mdat
10562
10563 dataOffset = 32 + // tfhd
10564 20 + // tfdt
10565 8 + // traf header
10566 16 + // mfhd
10567 8 + // moof header
10568 8; // mdat header
10569 // audio tracks require less metadata
10570
10571 if (track.type === 'audio') {
10572 trackFragmentRun = trun$1(track, dataOffset);
10573 return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun);
10574 } // video tracks should contain an independent and disposable samples
10575 // box (sdtp)
10576 // generate one and adjust offsets to match
10577
10578
10579 sampleDependencyTable = sdtp(track);
10580 trackFragmentRun = trun$1(track, sampleDependencyTable.length + dataOffset);
10581 return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable);
10582 };
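  /*
   * Worked example (illustrative only): the fixed sizes above sum to
   * 32 + 20 + 8 + 16 + 8 + 8 = 92 bytes, so an audio track's first mdat
   * payload byte sits 92 bytes plus the trun box length past the start of
   * the moof; video tracks also add the sdtp box length to that offset.
   */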
10583 /**
10584 * Generate a track box.
10585 * @param track {object} a track definition
10586 * @return {Uint8Array} the track box
10587 */
10588
10589
10590 trak = function trak(track) {
10591 track.duration = track.duration || 0xffffffff;
10592 return box(types.trak, tkhd(track), mdia(track));
10593 };
10594
10595 trex = function trex(track) {
10596 var result = new Uint8Array([0x00, // version 0
10597 0x00, 0x00, 0x00, // flags
10598 (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
10599 0x00, 0x00, 0x00, 0x01, // default_sample_description_index
10600 0x00, 0x00, 0x00, 0x00, // default_sample_duration
10601 0x00, 0x00, 0x00, 0x00, // default_sample_size
10602 0x00, 0x01, 0x00, 0x01 // default_sample_flags
10603 ]); // the last two bytes of default_sample_flags is the sample
10604 // degradation priority, a hint about the importance of this sample
10605 // relative to others. Lower the degradation priority for all sample
10606 // types other than video.
10607
10608 if (track.type !== 'video') {
10609 result[result.length - 1] = 0x00;
10610 }
10611
10612 return box(types.trex, result);
10613 };
10614
10615 (function () {
10616 var audioTrun, videoTrun, trunHeader; // This method assumes all samples are uniform. That is, if a
10617 // duration is present for the first sample, it will be present for
10618 // all subsequent samples.
10619 // see ISO/IEC 14496-12:2012, Section 8.8.8.1
10620
10621 trunHeader = function trunHeader(samples, offset) {
10622 var durationPresent = 0,
10623 sizePresent = 0,
10624 flagsPresent = 0,
10625 compositionTimeOffset = 0; // trun flag constants
10626
10627 if (samples.length) {
10628 if (samples[0].duration !== undefined) {
10629 durationPresent = 0x1;
10630 }
10631
10632 if (samples[0].size !== undefined) {
10633 sizePresent = 0x2;
10634 }
10635
10636 if (samples[0].flags !== undefined) {
10637 flagsPresent = 0x4;
10638 }
10639
10640 if (samples[0].compositionTimeOffset !== undefined) {
10641 compositionTimeOffset = 0x8;
10642 }
10643 }
10644
10645 return [0x00, // version 0
10646 0x00, durationPresent | sizePresent | flagsPresent | compositionTimeOffset, 0x01, // flags
10647 (samples.length & 0xFF000000) >>> 24, (samples.length & 0xFF0000) >>> 16, (samples.length & 0xFF00) >>> 8, samples.length & 0xFF, // sample_count
10648 (offset & 0xFF000000) >>> 24, (offset & 0xFF0000) >>> 16, (offset & 0xFF00) >>> 8, offset & 0xFF // data_offset
10649 ];
10650 };
10651
10652 videoTrun = function videoTrun(track, offset) {
 10653 var bytesOffset, bytes, header, samples, sample, i;
10654 samples = track.samples || [];
10655 offset += 8 + 12 + 16 * samples.length;
10656 header = trunHeader(samples, offset);
10657 bytes = new Uint8Array(header.length + samples.length * 16);
10658 bytes.set(header);
 10659 bytesOffset = header.length;
10660
10661 for (i = 0; i < samples.length; i++) {
10662 sample = samples[i];
 10663 bytes[bytesOffset++] = (sample.duration & 0xFF000000) >>> 24;
 10664 bytes[bytesOffset++] = (sample.duration & 0xFF0000) >>> 16;
 10665 bytes[bytesOffset++] = (sample.duration & 0xFF00) >>> 8;
 10666 bytes[bytesOffset++] = sample.duration & 0xFF; // sample_duration
 10667
 10668 bytes[bytesOffset++] = (sample.size & 0xFF000000) >>> 24;
 10669 bytes[bytesOffset++] = (sample.size & 0xFF0000) >>> 16;
 10670 bytes[bytesOffset++] = (sample.size & 0xFF00) >>> 8;
 10671 bytes[bytesOffset++] = sample.size & 0xFF; // sample_size
 10672
 10673 bytes[bytesOffset++] = sample.flags.isLeading << 2 | sample.flags.dependsOn;
 10674 bytes[bytesOffset++] = sample.flags.isDependedOn << 6 | sample.flags.hasRedundancy << 4 | sample.flags.paddingValue << 1 | sample.flags.isNonSyncSample;
 10675 bytes[bytesOffset++] = (sample.flags.degradationPriority & 0xFF00) >>> 8;
 10676 bytes[bytesOffset++] = sample.flags.degradationPriority & 0xFF; // sample_flags (degradation_priority is 16 bits)
 10677
 10678 bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF000000) >>> 24;
 10679 bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF0000) >>> 16;
 10680 bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF00) >>> 8;
 10681 bytes[bytesOffset++] = sample.compositionTimeOffset & 0xFF; // sample_composition_time_offset
10682 }
10683
10684 return box(types.trun, bytes);
10685 };
10686
10687 audioTrun = function audioTrun(track, offset) {
 10688 var bytes, bytesOffset, header, samples, sample, i;
10689 samples = track.samples || [];
10690 offset += 8 + 12 + 8 * samples.length;
10691 header = trunHeader(samples, offset);
10692 bytes = new Uint8Array(header.length + samples.length * 8);
10693 bytes.set(header);
 10694 bytesOffset = header.length;
10695
10696 for (i = 0; i < samples.length; i++) {
10697 sample = samples[i];
 10698 bytes[bytesOffset++] = (sample.duration & 0xFF000000) >>> 24;
 10699 bytes[bytesOffset++] = (sample.duration & 0xFF0000) >>> 16;
 10700 bytes[bytesOffset++] = (sample.duration & 0xFF00) >>> 8;
 10701 bytes[bytesOffset++] = sample.duration & 0xFF; // sample_duration
 10702
 10703 bytes[bytesOffset++] = (sample.size & 0xFF000000) >>> 24;
 10704 bytes[bytesOffset++] = (sample.size & 0xFF0000) >>> 16;
 10705 bytes[bytesOffset++] = (sample.size & 0xFF00) >>> 8;
 10706 bytes[bytesOffset++] = sample.size & 0xFF; // sample_size
10707 }
10708
10709 return box(types.trun, bytes);
10710 };
10711
10712 trun$1 = function trun(track, offset) {
10713 if (track.type === 'audio') {
10714 return audioTrun(track, offset);
10715 }
10716
10717 return videoTrun(track, offset);
10718 };
10719 })();
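  /*
   * Worked example (illustrative only): video samples typically carry
   * duration, size, flags and compositionTimeOffset, so trunHeader() emits
   * tf_flags 0x000F01 (0x1 | 0x2 | 0x4 | 0x8 in the middle byte, plus 0x01
   * for data-offset-present); audio samples with only duration and size
   * yield tf_flags 0x000301.
   */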
10720
10721 var mp4Generator = {
10722 ftyp: ftyp,
10723 mdat: mdat,
10724 moof: moof,
10725 moov: moov,
10726 initSegment: function initSegment(tracks) {
10727 var fileType = ftyp(),
10728 movie = moov(tracks),
10729 result;
10730 result = new Uint8Array(fileType.byteLength + movie.byteLength);
10731 result.set(fileType);
10732 result.set(movie, fileType.byteLength);
10733 return result;
10734 }
10735 };
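  /*
   * Illustrative usage sketch (not part of the original bundle): a caller
   * pairs one init segment with a sequence of moof/mdat fragments built from
   * the same track objects. `tracks`, `sequence` and `sampleData` here are
   * assumed inputs from the surrounding transmuxer.
   *
   *   var init = mp4Generator.initSegment(tracks);       // ftyp + moov
   *   var moofBox = mp4Generator.moof(sequence, tracks);
   *   var mdatBox = mp4Generator.mdat(sampleData);
   *   // concatenate moofBox + mdatBox for each media segment
   */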
10736 /**
10737 * mux.js
10738 *
10739 * Copyright (c) Brightcove
10740 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
10741 */
10742 // Convert an array of nal units into an array of frames with each frame being
10743 // composed of the nal units that make up that frame
 10744 // Also keep track of cumulative data about the frame from the nal units such
10745 // as the frame duration, starting pts, etc.
10746
10747 var groupNalsIntoFrames = function groupNalsIntoFrames(nalUnits) {
10748 var i,
10749 currentNal,
10750 currentFrame = [],
10751 frames = []; // TODO added for LHLS, make sure this is OK
10752
10753 frames.byteLength = 0;
10754 frames.nalCount = 0;
10755 frames.duration = 0;
10756 currentFrame.byteLength = 0;
10757
10758 for (i = 0; i < nalUnits.length; i++) {
10759 currentNal = nalUnits[i]; // Split on 'aud'-type nal units
10760
10761 if (currentNal.nalUnitType === 'access_unit_delimiter_rbsp') {
10762 // Since the very first nal unit is expected to be an AUD
10763 // only push to the frames array when currentFrame is not empty
10764 if (currentFrame.length) {
10765 currentFrame.duration = currentNal.dts - currentFrame.dts; // TODO added for LHLS, make sure this is OK
10766
10767 frames.byteLength += currentFrame.byteLength;
10768 frames.nalCount += currentFrame.length;
10769 frames.duration += currentFrame.duration;
10770 frames.push(currentFrame);
10771 }
10772
10773 currentFrame = [currentNal];
10774 currentFrame.byteLength = currentNal.data.byteLength;
10775 currentFrame.pts = currentNal.pts;
10776 currentFrame.dts = currentNal.dts;
10777 } else {
10778 // Specifically flag key frames for ease of use later
10779 if (currentNal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
10780 currentFrame.keyFrame = true;
10781 }
10782
10783 currentFrame.duration = currentNal.dts - currentFrame.dts;
10784 currentFrame.byteLength += currentNal.data.byteLength;
10785 currentFrame.push(currentNal);
10786 }
10787 } // For the last frame, use the duration of the previous frame if we
10788 // have nothing better to go on
10789
10790
10791 if (frames.length && (!currentFrame.duration || currentFrame.duration <= 0)) {
10792 currentFrame.duration = frames[frames.length - 1].duration;
10793 } // Push the final frame
10794 // TODO added for LHLS, make sure this is OK
10795
10796
10797 frames.byteLength += currentFrame.byteLength;
10798 frames.nalCount += currentFrame.length;
10799 frames.duration += currentFrame.duration;
10800 frames.push(currentFrame);
10801 return frames;
10802 }; // Convert an array of frames into an array of Gop with each Gop being composed
10803 // of the frames that make up that Gop
 10804 // Also keep track of cumulative data about the Gop from the frames such as the
10805 // Gop duration, starting pts, etc.
10806
10807
10808 var groupFramesIntoGops = function groupFramesIntoGops(frames) {
10809 var i,
10810 currentFrame,
10811 currentGop = [],
10812 gops = []; // We must pre-set some of the values on the Gop since we
10813 // keep running totals of these values
10814
10815 currentGop.byteLength = 0;
10816 currentGop.nalCount = 0;
10817 currentGop.duration = 0;
10818 currentGop.pts = frames[0].pts;
10819 currentGop.dts = frames[0].dts; // store some metadata about all the Gops
10820
10821 gops.byteLength = 0;
10822 gops.nalCount = 0;
10823 gops.duration = 0;
10824 gops.pts = frames[0].pts;
10825 gops.dts = frames[0].dts;
10826
10827 for (i = 0; i < frames.length; i++) {
10828 currentFrame = frames[i];
10829
10830 if (currentFrame.keyFrame) {
 10831 // Since the very first frame is expected to be a keyframe
10832 // only push to the gops array when currentGop is not empty
10833 if (currentGop.length) {
10834 gops.push(currentGop);
10835 gops.byteLength += currentGop.byteLength;
10836 gops.nalCount += currentGop.nalCount;
10837 gops.duration += currentGop.duration;
10838 }
10839
10840 currentGop = [currentFrame];
10841 currentGop.nalCount = currentFrame.length;
10842 currentGop.byteLength = currentFrame.byteLength;
10843 currentGop.pts = currentFrame.pts;
10844 currentGop.dts = currentFrame.dts;
10845 currentGop.duration = currentFrame.duration;
10846 } else {
10847 currentGop.duration += currentFrame.duration;
10848 currentGop.nalCount += currentFrame.length;
10849 currentGop.byteLength += currentFrame.byteLength;
10850 currentGop.push(currentFrame);
10851 }
10852 }
10853
10854 if (gops.length && currentGop.duration <= 0) {
10855 currentGop.duration = gops[gops.length - 1].duration;
10856 }
10857
10858 gops.byteLength += currentGop.byteLength;
10859 gops.nalCount += currentGop.nalCount;
10860 gops.duration += currentGop.duration; // push the final Gop
10861
10862 gops.push(currentGop);
10863 return gops;
10864 };
10865 /*
10866 * Search for the first keyframe in the GOPs and throw away all frames
10867 * until that keyframe. Then extend the duration of the pulled keyframe
10868 * and pull the PTS and DTS of the keyframe so that it covers the time
10869 * range of the frames that were disposed.
10870 *
10871 * @param {Array} gops video GOPs
10872 * @returns {Array} modified video GOPs
10873 */
10874
10875
10876 var extendFirstKeyFrame = function extendFirstKeyFrame(gops) {
10877 var currentGop;
10878
10879 if (!gops[0][0].keyFrame && gops.length > 1) {
10880 // Remove the first GOP
10881 currentGop = gops.shift();
10882 gops.byteLength -= currentGop.byteLength;
10883 gops.nalCount -= currentGop.nalCount; // Extend the first frame of what is now the
10884 // first gop to cover the time period of the
10885 // frames we just removed
10886
10887 gops[0][0].dts = currentGop.dts;
10888 gops[0][0].pts = currentGop.pts;
10889 gops[0][0].duration += currentGop.duration;
10890 }
10891
10892 return gops;
10893 };
10894 /**
10895 * Default sample object
10896 * see ISO/IEC 14496-12:2012, section 8.6.4.3
10897 */
10898
10899
10900 var createDefaultSample = function createDefaultSample() {
10901 return {
10902 size: 0,
10903 flags: {
10904 isLeading: 0,
10905 dependsOn: 1,
10906 isDependedOn: 0,
10907 hasRedundancy: 0,
10908 degradationPriority: 0,
10909 isNonSyncSample: 1
10910 }
10911 };
10912 };
10913 /*
10914 * Collates information from a video frame into an object for eventual
10915 * entry into an MP4 sample table.
10916 *
10917 * @param {Object} frame the video frame
10918 * @param {Number} dataOffset the byte offset to position the sample
10919 * @return {Object} object containing sample table info for a frame
10920 */
10921
10922
10923 var sampleForFrame = function sampleForFrame(frame, dataOffset) {
10924 var sample = createDefaultSample();
10925 sample.dataOffset = dataOffset;
10926 sample.compositionTimeOffset = frame.pts - frame.dts;
10927 sample.duration = frame.duration;
10928 sample.size = 4 * frame.length; // Space for nal unit size
10929
10930 sample.size += frame.byteLength;
10931
10932 if (frame.keyFrame) {
10933 sample.flags.dependsOn = 2;
10934 sample.flags.isNonSyncSample = 0;
10935 }
10936
10937 return sample;
10938 }; // generate the track's sample table from an array of gops
10939
10940
10941 var generateSampleTable$1 = function generateSampleTable(gops, baseDataOffset) {
10942 var h,
10943 i,
10944 sample,
10945 currentGop,
10946 currentFrame,
10947 dataOffset = baseDataOffset || 0,
10948 samples = [];
10949
10950 for (h = 0; h < gops.length; h++) {
10951 currentGop = gops[h];
10952
10953 for (i = 0; i < currentGop.length; i++) {
10954 currentFrame = currentGop[i];
10955 sample = sampleForFrame(currentFrame, dataOffset);
10956 dataOffset += sample.size;
10957 samples.push(sample);
10958 }
10959 }
10960
10961 return samples;
10962 }; // generate the track's raw mdat data from an array of gops
10963
10964
10965 var concatenateNalData = function concatenateNalData(gops) {
10966 var h,
10967 i,
10968 j,
10969 currentGop,
10970 currentFrame,
10971 currentNal,
10972 dataOffset = 0,
10973 nalsByteLength = gops.byteLength,
10974 numberOfNals = gops.nalCount,
10975 totalByteLength = nalsByteLength + 4 * numberOfNals,
10976 data = new Uint8Array(totalByteLength),
10977 view = new DataView(data.buffer); // For each Gop..
10978
10979 for (h = 0; h < gops.length; h++) {
10980 currentGop = gops[h]; // For each Frame..
10981
10982 for (i = 0; i < currentGop.length; i++) {
10983 currentFrame = currentGop[i]; // For each NAL..
10984
10985 for (j = 0; j < currentFrame.length; j++) {
10986 currentNal = currentFrame[j];
10987 view.setUint32(dataOffset, currentNal.data.byteLength);
10988 dataOffset += 4;
10989 data.set(currentNal.data, dataOffset);
10990 dataOffset += currentNal.data.byteLength;
10991 }
10992 }
10993 }
10994
10995 return data;
10996 }; // generate the track's sample table from a frame
10997
10998
10999 var generateSampleTableForFrame = function generateSampleTableForFrame(frame, baseDataOffset) {
11000 var sample,
11001 dataOffset = baseDataOffset || 0,
11002 samples = [];
11003 sample = sampleForFrame(frame, dataOffset);
11004 samples.push(sample);
11005 return samples;
11006 }; // generate the track's raw mdat data from a frame
11007
11008
11009 var concatenateNalDataForFrame = function concatenateNalDataForFrame(frame) {
11010 var i,
11011 currentNal,
11012 dataOffset = 0,
11013 nalsByteLength = frame.byteLength,
11014 numberOfNals = frame.length,
11015 totalByteLength = nalsByteLength + 4 * numberOfNals,
11016 data = new Uint8Array(totalByteLength),
11017 view = new DataView(data.buffer); // For each NAL..
11018
11019 for (i = 0; i < frame.length; i++) {
11020 currentNal = frame[i];
11021 view.setUint32(dataOffset, currentNal.data.byteLength);
11022 dataOffset += 4;
11023 data.set(currentNal.data, dataOffset);
11024 dataOffset += currentNal.data.byteLength;
11025 }
11026
11027 return data;
11028 };
11029
11030 var frameUtils = {
11031 groupNalsIntoFrames: groupNalsIntoFrames,
11032 groupFramesIntoGops: groupFramesIntoGops,
11033 extendFirstKeyFrame: extendFirstKeyFrame,
11034 generateSampleTable: generateSampleTable$1,
11035 concatenateNalData: concatenateNalData,
11036 generateSampleTableForFrame: generateSampleTableForFrame,
11037 concatenateNalDataForFrame: concatenateNalDataForFrame
11038 };
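  /*
   * Illustrative pipeline sketch (not part of the original bundle): how
   * these helpers are typically chained for a video track, assuming
   * `nalUnits` were collected from an H.264 elementary stream.
   *
   *   var frames  = frameUtils.groupNalsIntoFrames(nalUnits);
   *   var gops    = frameUtils.groupFramesIntoGops(frames);
   *   gops        = frameUtils.extendFirstKeyFrame(gops);
   *   var samples = frameUtils.generateSampleTable(gops, 0);
   *   var nalData = frameUtils.concatenateNalData(gops);
   */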
11039 /**
11040 * mux.js
11041 *
11042 * Copyright (c) Brightcove
11043 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
11044 */
11045
11046 var highPrefix = [33, 16, 5, 32, 164, 27];
11047 var lowPrefix = [33, 65, 108, 84, 1, 2, 4, 8, 168, 2, 4, 8, 17, 191, 252];
11048
11049 var zeroFill = function zeroFill(count) {
11050 var a = [];
11051
11052 while (count--) {
11053 a.push(0);
11054 }
11055
11056 return a;
11057 };
11058
11059 var makeTable = function makeTable(metaTable) {
11060 return Object.keys(metaTable).reduce(function (obj, key) {
11061 obj[key] = new Uint8Array(metaTable[key].reduce(function (arr, part) {
11062 return arr.concat(part);
11063 }, []));
11064 return obj;
11065 }, {});
11066 };
11067
11068 var silence;
11069
11070 var silence_1 = function silence_1() {
11071 if (!silence) {
11072 // Frames-of-silence to use for filling in missing AAC frames
11073 var coneOfSilence = {
11074 96000: [highPrefix, [227, 64], zeroFill(154), [56]],
11075 88200: [highPrefix, [231], zeroFill(170), [56]],
11076 64000: [highPrefix, [248, 192], zeroFill(240), [56]],
11077 48000: [highPrefix, [255, 192], zeroFill(268), [55, 148, 128], zeroFill(54), [112]],
11078 44100: [highPrefix, [255, 192], zeroFill(268), [55, 163, 128], zeroFill(84), [112]],
11079 32000: [highPrefix, [255, 192], zeroFill(268), [55, 234], zeroFill(226), [112]],
11080 24000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 112], zeroFill(126), [224]],
11081 16000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 255], zeroFill(269), [223, 108], zeroFill(195), [1, 192]],
11082 12000: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 253, 128], zeroFill(259), [56]],
11083 11025: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 255, 192], zeroFill(268), [55, 175, 128], zeroFill(108), [112]],
11084 8000: [lowPrefix, zeroFill(268), [3, 121, 16], zeroFill(47), [7]]
11085 };
11086 silence = makeTable(coneOfSilence);
11087 }
11088
11089 return silence;
11090 };
11091 /**
11092 * mux.js
11093 *
11094 * Copyright (c) Brightcove
11095 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
11096 */
11097
11098
11099 var ONE_SECOND_IN_TS$4 = 90000,
11100 // 90kHz clock
11101 secondsToVideoTs,
11102 secondsToAudioTs,
11103 videoTsToSeconds,
11104 audioTsToSeconds,
11105 audioTsToVideoTs,
11106 videoTsToAudioTs,
11107 metadataTsToSeconds;
11108
11109 secondsToVideoTs = function secondsToVideoTs(seconds) {
11110 return seconds * ONE_SECOND_IN_TS$4;
11111 };
11112
11113 secondsToAudioTs = function secondsToAudioTs(seconds, sampleRate) {
11114 return seconds * sampleRate;
11115 };
11116
11117 videoTsToSeconds = function videoTsToSeconds(timestamp) {
11118 return timestamp / ONE_SECOND_IN_TS$4;
11119 };
11120
11121 audioTsToSeconds = function audioTsToSeconds(timestamp, sampleRate) {
11122 return timestamp / sampleRate;
11123 };
11124
11125 audioTsToVideoTs = function audioTsToVideoTs(timestamp, sampleRate) {
11126 return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
11127 };
11128
11129 videoTsToAudioTs = function videoTsToAudioTs(timestamp, sampleRate) {
11130 return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
11131 };
11132 /**
11133 * Adjust ID3 tag or caption timing information by the timeline pts values
11134 * (if keepOriginalTimestamps is false) and convert to seconds
11135 */
11136
11137
11138 metadataTsToSeconds = function metadataTsToSeconds(timestamp, timelineStartPts, keepOriginalTimestamps) {
11139 return videoTsToSeconds(keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts);
11140 };
11141
11142 var clock = {
11143 ONE_SECOND_IN_TS: ONE_SECOND_IN_TS$4,
11144 secondsToVideoTs: secondsToVideoTs,
11145 secondsToAudioTs: secondsToAudioTs,
11146 videoTsToSeconds: videoTsToSeconds,
11147 audioTsToSeconds: audioTsToSeconds,
11148 audioTsToVideoTs: audioTsToVideoTs,
11149 videoTsToAudioTs: videoTsToAudioTs,
11150 metadataTsToSeconds: metadataTsToSeconds
11151 };
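  /*
   * Worked examples (illustrative only) of the 90 kHz clock conversions:
   *
   *   clock.secondsToVideoTs(1.5);          // 135000 ticks
   *   clock.videoTsToSeconds(45000);        // 0.5 seconds
   *   clock.audioTsToVideoTs(44100, 44100); // 90000 (one second of audio)
   */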
11152 /**
11153 * mux.js
11154 *
11155 * Copyright (c) Brightcove
11156 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
11157 */
11158
11159 /**
11160 * Sum the `byteLength` properties of the data in each AAC frame
11161 */
11162
11163 var sumFrameByteLengths = function sumFrameByteLengths(array) {
11164 var i,
11165 currentObj,
 11166 sum = 0; // sum the byteLength of the data in each frame
11167
11168 for (i = 0; i < array.length; i++) {
11169 currentObj = array[i];
11170 sum += currentObj.data.byteLength;
11171 }
11172
11173 return sum;
11174 }; // Possibly pad (prefix) the audio track with silence if appending this track
11175 // would lead to the introduction of a gap in the audio buffer
11176
11177
11178 var prefixWithSilence = function prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime) {
11179 var baseMediaDecodeTimeTs,
11180 frameDuration = 0,
11181 audioGapDuration = 0,
11182 audioFillFrameCount = 0,
11183 audioFillDuration = 0,
11184 silentFrame,
11185 i,
11186 firstFrame;
11187
11188 if (!frames.length) {
11189 return;
11190 }
11191
11192 baseMediaDecodeTimeTs = clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate); // determine frame clock duration based on sample rate, round up to avoid overfills
11193
11194 frameDuration = Math.ceil(clock.ONE_SECOND_IN_TS / (track.samplerate / 1024));
11195
11196 if (audioAppendStartTs && videoBaseMediaDecodeTime) {
11197 // insert the shortest possible amount (audio gap or audio to video gap)
11198 audioGapDuration = baseMediaDecodeTimeTs - Math.max(audioAppendStartTs, videoBaseMediaDecodeTime); // number of full frames in the audio gap
11199
11200 audioFillFrameCount = Math.floor(audioGapDuration / frameDuration);
11201 audioFillDuration = audioFillFrameCount * frameDuration;
11202 } // don't attempt to fill gaps smaller than a single frame or larger
11203 // than a half second
11204
11205
11206 if (audioFillFrameCount < 1 || audioFillDuration > clock.ONE_SECOND_IN_TS / 2) {
11207 return;
11208 }
11209
11210 silentFrame = silence_1()[track.samplerate];
11211
11212 if (!silentFrame) {
11213 // we don't have a silent frame pregenerated for the sample rate, so use a frame
11214 // from the content instead
11215 silentFrame = frames[0].data;
11216 }
11217
11218 for (i = 0; i < audioFillFrameCount; i++) {
11219 firstFrame = frames[0];
11220 frames.splice(0, 0, {
11221 data: silentFrame,
11222 dts: firstFrame.dts - frameDuration,
11223 pts: firstFrame.pts - frameDuration
11224 });
11225 }
11226
11227 track.baseMediaDecodeTime -= Math.floor(clock.videoTsToAudioTs(audioFillDuration, track.samplerate));
11228 return audioFillDuration;
 11229 }; // If the audio segment extends before the earliest allowed DTS
 11230 // value, remove AAC frames until it starts at or after the earliest
 11231 // allowed DTS so that we don't end up with a negative
 11232 // baseMediaDecodeTime for the audio track
11233
11234
11235 var trimAdtsFramesByEarliestDts = function trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts) {
11236 if (track.minSegmentDts >= earliestAllowedDts) {
11237 return adtsFrames;
11238 } // We will need to recalculate the earliest segment Dts
11239
11240
11241 track.minSegmentDts = Infinity;
11242 return adtsFrames.filter(function (currentFrame) {
 11243 // If this is an allowed frame, keep it and record its DTS
11244 if (currentFrame.dts >= earliestAllowedDts) {
11245 track.minSegmentDts = Math.min(track.minSegmentDts, currentFrame.dts);
11246 track.minSegmentPts = track.minSegmentDts;
11247 return true;
11248 } // Otherwise, discard it
11249
11250
11251 return false;
11252 });
 11253 }; // generate the track's sample table from an array of frames
11254
11255
11256 var generateSampleTable = function generateSampleTable(frames) {
11257 var i,
11258 currentFrame,
11259 samples = [];
11260
11261 for (i = 0; i < frames.length; i++) {
11262 currentFrame = frames[i];
11263 samples.push({
11264 size: currentFrame.data.byteLength,
 11265 duration: 1024 // AAC frames always contain 1024 audio samples
11266
11267 });
11268 }
11269
11270 return samples;
 11271 }; // generate the track's raw mdat data from an array of frames
11272
11273
11274 var concatenateFrameData = function concatenateFrameData(frames) {
11275 var i,
11276 currentFrame,
11277 dataOffset = 0,
11278 data = new Uint8Array(sumFrameByteLengths(frames));
11279
11280 for (i = 0; i < frames.length; i++) {
11281 currentFrame = frames[i];
11282 data.set(currentFrame.data, dataOffset);
11283 dataOffset += currentFrame.data.byteLength;
11284 }
11285
11286 return data;
11287 };
11288
11289 var audioFrameUtils = {
11290 prefixWithSilence: prefixWithSilence,
11291 trimAdtsFramesByEarliestDts: trimAdtsFramesByEarliestDts,
11292 generateSampleTable: generateSampleTable,
11293 concatenateFrameData: concatenateFrameData
11294 };
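  /*
   * Illustrative usage sketch (not part of the original bundle): the order
   * in which these helpers are typically applied to ADTS frames before
   * muxing. `adtsFrames`, `track` and the timing arguments are assumed to
   * come from the surrounding transmuxer.
   *
   *   var frames = audioFrameUtils.trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts);
   *   audioFrameUtils.prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime);
   *   track.samples = audioFrameUtils.generateSampleTable(frames);
   *   var mdatData = audioFrameUtils.concatenateFrameData(frames);
   */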
11295 /**
11296 * mux.js
11297 *
11298 * Copyright (c) Brightcove
11299 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
11300 */
11301
11302 var ONE_SECOND_IN_TS$3 = clock.ONE_SECOND_IN_TS;
11303 /**
11304 * Store information about the start and end of the track and the
11305 * duration for each frame/sample we process in order to calculate
11306 * the baseMediaDecodeTime
11307 */
11308
11309 var collectDtsInfo = function collectDtsInfo(track, data) {
11310 if (typeof data.pts === 'number') {
11311 if (track.timelineStartInfo.pts === undefined) {
11312 track.timelineStartInfo.pts = data.pts;
11313 }
11314
11315 if (track.minSegmentPts === undefined) {
11316 track.minSegmentPts = data.pts;
11317 } else {
11318 track.minSegmentPts = Math.min(track.minSegmentPts, data.pts);
11319 }
11320
11321 if (track.maxSegmentPts === undefined) {
11322 track.maxSegmentPts = data.pts;
11323 } else {
11324 track.maxSegmentPts = Math.max(track.maxSegmentPts, data.pts);
11325 }
11326 }
11327
11328 if (typeof data.dts === 'number') {
11329 if (track.timelineStartInfo.dts === undefined) {
11330 track.timelineStartInfo.dts = data.dts;
11331 }
11332
11333 if (track.minSegmentDts === undefined) {
11334 track.minSegmentDts = data.dts;
11335 } else {
11336 track.minSegmentDts = Math.min(track.minSegmentDts, data.dts);
11337 }
11338
11339 if (track.maxSegmentDts === undefined) {
11340 track.maxSegmentDts = data.dts;
11341 } else {
11342 track.maxSegmentDts = Math.max(track.maxSegmentDts, data.dts);
11343 }
11344 }
11345 };
11346 /**
11347 * Clear values used to calculate the baseMediaDecodeTime between
11348 * tracks
11349 */
11350
11351
11352 var clearDtsInfo = function clearDtsInfo(track) {
11353 delete track.minSegmentDts;
11354 delete track.maxSegmentDts;
11355 delete track.minSegmentPts;
11356 delete track.maxSegmentPts;
11357 };
11358 /**
11359 * Calculate the track's baseMediaDecodeTime based on the earliest
11360 * DTS the transmuxer has ever seen and the minimum DTS for the
11361 * current track
11362 * @param track {object} track metadata configuration
11363 * @param keepOriginalTimestamps {boolean} If true, keep the timestamps
11364 * in the source; false to adjust the first segment to start at 0.
11365 */
11366
11367
11368 var calculateTrackBaseMediaDecodeTime = function calculateTrackBaseMediaDecodeTime(track, keepOriginalTimestamps) {
11369 var baseMediaDecodeTime,
11370 scale,
11371 minSegmentDts = track.minSegmentDts; // Optionally adjust the time so the first segment starts at zero.
11372
11373 if (!keepOriginalTimestamps) {
11374 minSegmentDts -= track.timelineStartInfo.dts;
11375 } // track.timelineStartInfo.baseMediaDecodeTime is the location, in time, where
11376 // we want the start of the first segment to be placed
11377
11378
11379 baseMediaDecodeTime = track.timelineStartInfo.baseMediaDecodeTime; // Add to that the distance this segment is from the very first
11380
11381 baseMediaDecodeTime += minSegmentDts; // baseMediaDecodeTime must not become negative
11382
11383 baseMediaDecodeTime = Math.max(0, baseMediaDecodeTime);
11384
11385 if (track.type === 'audio') {
11386 // Audio has a different clock equal to the sampling_rate so we need to
11387 // scale the PTS values into the clock rate of the track
11388 scale = track.samplerate / ONE_SECOND_IN_TS$3;
11389 baseMediaDecodeTime *= scale;
11390 baseMediaDecodeTime = Math.floor(baseMediaDecodeTime);
11391 }
11392
11393 return baseMediaDecodeTime;
11394 };
11395
11396 var trackDecodeInfo = {
11397 clearDtsInfo: clearDtsInfo,
11398 calculateTrackBaseMediaDecodeTime: calculateTrackBaseMediaDecodeTime,
11399 collectDtsInfo: collectDtsInfo
11400 };
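  /*
   * Worked example (illustrative only): for a hypothetical 44.1 kHz audio
   * track whose adjusted minSegmentDts works out to 90000 (one second on the
   * 90 kHz clock), the scaling above yields a baseMediaDecodeTime in the
   * track's own sample-rate clock:
   *
   *   Math.floor(90000 * (44100 / 90000)); // 44100
   */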
11401 /**
11402 * mux.js
11403 *
11404 * Copyright (c) Brightcove
11405 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
11406 *
11407 * Reads in-band caption information from a video elementary
11408 * stream. Captions must follow the CEA-708 standard for injection
 11409 * into an MPEG-2 transport stream.
11410 * @see https://en.wikipedia.org/wiki/CEA-708
11411 * @see https://www.gpo.gov/fdsys/pkg/CFR-2007-title47-vol1/pdf/CFR-2007-title47-vol1-sec15-119.pdf
11412 */
 11413 // SEI NAL units include a payload type field to indicate how they are to be
 11414 // interpreted. CEA-708 caption content is always transmitted with
 11415 // payload type 0x04.
11416
11417 var USER_DATA_REGISTERED_ITU_T_T35 = 4,
11418 RBSP_TRAILING_BITS = 128;
11419 /**
11420 * Parse a supplemental enhancement information (SEI) NAL unit.
11421 * Stops parsing once a message of type ITU T T35 has been found.
11422 *
11423 * @param bytes {Uint8Array} the bytes of a SEI NAL unit
11424 * @return {object} the parsed SEI payload
11425 * @see Rec. ITU-T H.264, 7.3.2.3.1
11426 */
11427
11428 var parseSei = function parseSei(bytes) {
11429 var i = 0,
11430 result = {
11431 payloadType: -1,
11432 payloadSize: 0
11433 },
11434 payloadType = 0,
 11435 payloadSize = 0; // go through the sei_rbsp parsing each individual sei_message
11436
11437 while (i < bytes.byteLength) {
11438 // stop once we have hit the end of the sei_rbsp
11439 if (bytes[i] === RBSP_TRAILING_BITS) {
11440 break;
11441 } // Parse payload type
11442
11443
11444 while (bytes[i] === 0xFF) {
11445 payloadType += 255;
11446 i++;
11447 }
11448
11449 payloadType += bytes[i++]; // Parse payload size
11450
11451 while (bytes[i] === 0xFF) {
11452 payloadSize += 255;
11453 i++;
11454 }
11455
11456 payloadSize += bytes[i++]; // this sei_message is a 608/708 caption so save it and break
11457 // there can only ever be one caption message in a frame's sei
11458
11459 if (!result.payload && payloadType === USER_DATA_REGISTERED_ITU_T_T35) {
11460 var userIdentifier = String.fromCharCode(bytes[i + 3], bytes[i + 4], bytes[i + 5], bytes[i + 6]);
11461
11462 if (userIdentifier === 'GA94') {
11463 result.payloadType = payloadType;
11464 result.payloadSize = payloadSize;
11465 result.payload = bytes.subarray(i, i + payloadSize);
11466 break;
11467 } else {
11468 result.payload = void 0;
11469 }
11470 } // skip the payload and parse the next message
11471
11472
11473 i += payloadSize;
11474 payloadType = 0;
11475 payloadSize = 0;
11476 }
11477
11478 return result;
11479 }; // see ANSI/SCTE 128-1 (2013), section 8.1
11480
11481
11482 var parseUserData = function parseUserData(sei) {
 11483 // itu_t_t35_country_code must be 181 (United States) for
11484 // captions
11485 if (sei.payload[0] !== 181) {
11486 return null;
11487 } // itu_t_t35_provider_code should be 49 (ATSC) for captions
11488
11489
11490 if ((sei.payload[1] << 8 | sei.payload[2]) !== 49) {
11491 return null;
11492 } // the user_identifier should be "GA94" to indicate ATSC1 data
11493
11494
11495 if (String.fromCharCode(sei.payload[3], sei.payload[4], sei.payload[5], sei.payload[6]) !== 'GA94') {
11496 return null;
11497 } // finally, user_data_type_code should be 0x03 for caption data
11498
11499
11500 if (sei.payload[7] !== 0x03) {
11501 return null;
11502 } // return the user_data_type_structure and strip the trailing
11503 // marker bits
11504
11505
11506 return sei.payload.subarray(8, sei.payload.length - 1);
11507 }; // see CEA-708-D, section 4.4
11508
11509
11510 var parseCaptionPackets = function parseCaptionPackets(pts, userData) {
11511 var results = [],
11512 i,
11513 count,
11514 offset,
11515 data; // if this is just filler, return immediately
11516
11517 if (!(userData[0] & 0x40)) {
11518 return results;
11519 } // parse out the cc_data_1 and cc_data_2 fields
11520
11521
11522 count = userData[0] & 0x1f;
11523
11524 for (i = 0; i < count; i++) {
11525 offset = i * 3;
11526 data = {
11527 type: userData[offset + 2] & 0x03,
11528 pts: pts
11529 }; // capture cc data when cc_valid is 1
11530
11531 if (userData[offset + 2] & 0x04) {
11532 data.ccData = userData[offset + 3] << 8 | userData[offset + 4];
11533 results.push(data);
11534 }
11535 }
11536
11537 return results;
11538 };
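  /*
   * Worked example (illustrative only): a user_data payload whose first byte
   * is 0x41 (process_cc_data_flag set, cc_count 1), with em_data 0xFF and a
   * cc byte triple [0xFC, 0x94, 0x2C], parses to a single packet: cc_valid
   * (0x04) is set, cc_type 0 selects field 1, and ccData is 0x942C.
   *
   *   parseCaptionPackets(0, new Uint8Array([0x41, 0xFF, 0xFC, 0x94, 0x2C]));
   *   // => [{ type: 0, pts: 0, ccData: 0x942C }]
   */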
11539
11540 var discardEmulationPreventionBytes$1 = function discardEmulationPreventionBytes(data) {
11541 var length = data.byteLength,
11542 emulationPreventionBytesPositions = [],
11543 i = 1,
11544 newLength,
11545 newData; // Find all `Emulation Prevention Bytes`
11546
11547 while (i < length - 2) {
11548 if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
11549 emulationPreventionBytesPositions.push(i + 2);
11550 i += 2;
11551 } else {
11552 i++;
11553 }
11554 } // If no Emulation Prevention Bytes were found just return the original
11555 // array
11556
11557
11558 if (emulationPreventionBytesPositions.length === 0) {
11559 return data;
11560 } // Create a new array to hold the NAL unit data
11561
11562
11563 newLength = length - emulationPreventionBytesPositions.length;
11564 newData = new Uint8Array(newLength);
11565 var sourceIndex = 0;
11566
11567 for (i = 0; i < newLength; sourceIndex++, i++) {
11568 if (sourceIndex === emulationPreventionBytesPositions[0]) {
11569 // Skip this byte
11570 sourceIndex++; // Remove this position index
11571
11572 emulationPreventionBytesPositions.shift();
11573 }
11574
11575 newData[i] = data[sourceIndex];
11576 }
11577
11578 return newData;
11579 }; // exports
11580
11581
11582 var captionPacketParser = {
11583 parseSei: parseSei,
11584 parseUserData: parseUserData,
11585 parseCaptionPackets: parseCaptionPackets,
11586 discardEmulationPreventionBytes: discardEmulationPreventionBytes$1,
11587 USER_DATA_REGISTERED_ITU_T_T35: USER_DATA_REGISTERED_ITU_T_T35
11588 }; // Link To Transport
11589 // -----------------
11590
11591 var CaptionStream$1 = function CaptionStream(options) {
11592 options = options || {};
 11593 CaptionStream.prototype.init.call(this); // parse708captions flag, defaults to true
11594
11595 this.parse708captions_ = typeof options.parse708captions === 'boolean' ? options.parse708captions : true;
11596 this.captionPackets_ = [];
11597 this.ccStreams_ = [new Cea608Stream(0, 0), // eslint-disable-line no-use-before-define
11598 new Cea608Stream(0, 1), // eslint-disable-line no-use-before-define
11599 new Cea608Stream(1, 0), // eslint-disable-line no-use-before-define
11600 new Cea608Stream(1, 1) // eslint-disable-line no-use-before-define
11601 ];
11602
11603 if (this.parse708captions_) {
11604 this.cc708Stream_ = new Cea708Stream({
11605 captionServices: options.captionServices
11606 }); // eslint-disable-line no-use-before-define
11607 }
11608
11609 this.reset(); // forward data and done events from CCs to this CaptionStream
11610
11611 this.ccStreams_.forEach(function (cc) {
11612 cc.on('data', this.trigger.bind(this, 'data'));
11613 cc.on('partialdone', this.trigger.bind(this, 'partialdone'));
11614 cc.on('done', this.trigger.bind(this, 'done'));
11615 }, this);
11616
11617 if (this.parse708captions_) {
11618 this.cc708Stream_.on('data', this.trigger.bind(this, 'data'));
11619 this.cc708Stream_.on('partialdone', this.trigger.bind(this, 'partialdone'));
11620 this.cc708Stream_.on('done', this.trigger.bind(this, 'done'));
11621 }
11622 };
11623
11624 CaptionStream$1.prototype = new stream();
11625
11626 CaptionStream$1.prototype.push = function (event) {
11627 var sei, userData, newCaptionPackets; // only examine SEI NALs
11628
11629 if (event.nalUnitType !== 'sei_rbsp') {
11630 return;
11631 } // parse the sei
11632
11633
11634 sei = captionPacketParser.parseSei(event.escapedRBSP); // no payload data, skip
11635
11636 if (!sei.payload) {
11637 return;
11638 } // ignore everything but user_data_registered_itu_t_t35
11639
11640
11641 if (sei.payloadType !== captionPacketParser.USER_DATA_REGISTERED_ITU_T_T35) {
11642 return;
11643 } // parse out the user data payload
11644
11645
11646 userData = captionPacketParser.parseUserData(sei); // ignore unrecognized userData
11647
11648 if (!userData) {
11649 return;
11650 } // Sometimes, the same segment # will be downloaded twice. To stop the
11651 // caption data from being processed twice, we track the latest dts we've
11652 // received and ignore everything with a dts before that. However, since
11653 // data for a specific dts can be split across packets on either side of
11654 // a segment boundary, we need to make sure we *don't* ignore the packets
11655 // from the *next* segment that have dts === this.latestDts_. By constantly
11656 // tracking the number of packets received with dts === this.latestDts_, we
11657 // know how many should be ignored once we start receiving duplicates.
11658
11659
11660 if (event.dts < this.latestDts_) {
11661 // We've started getting older data, so set the flag.
11662 this.ignoreNextEqualDts_ = true;
11663 return;
11664 } else if (event.dts === this.latestDts_ && this.ignoreNextEqualDts_) {
11665 this.numSameDts_--;
11666
11667 if (!this.numSameDts_) {
11668 // We've received the last duplicate packet, time to start processing again
11669 this.ignoreNextEqualDts_ = false;
11670 }
11671
11672 return;
11673 } // parse out CC data packets and save them for later
11674
11675
11676 newCaptionPackets = captionPacketParser.parseCaptionPackets(event.pts, userData);
11677 this.captionPackets_ = this.captionPackets_.concat(newCaptionPackets);
11678
11679 if (this.latestDts_ !== event.dts) {
11680 this.numSameDts_ = 0;
11681 }
11682
11683 this.numSameDts_++;
11684 this.latestDts_ = event.dts;
11685 };
11686
11687 CaptionStream$1.prototype.flushCCStreams = function (flushType) {
11688 this.ccStreams_.forEach(function (cc) {
11689 return flushType === 'flush' ? cc.flush() : cc.partialFlush();
11690 }, this);
11691 };
11692
11693 CaptionStream$1.prototype.flushStream = function (flushType) {
11694 // make sure we actually parsed captions before proceeding
11695 if (!this.captionPackets_.length) {
11696 this.flushCCStreams(flushType);
11697 return;
11698 } // In Chrome, the Array#sort function is not stable so add a
11699 // presortIndex that we can use to ensure we get a stable-sort
11700
11701
11702 this.captionPackets_.forEach(function (elem, idx) {
11703 elem.presortIndex = idx;
11704 }); // sort caption byte-pairs based on their PTS values
11705
11706 this.captionPackets_.sort(function (a, b) {
11707 if (a.pts === b.pts) {
11708 return a.presortIndex - b.presortIndex;
11709 }
11710
11711 return a.pts - b.pts;
11712 });
11713 this.captionPackets_.forEach(function (packet) {
11714 if (packet.type < 2) {
11715 // Dispatch packet to the right Cea608Stream
11716 this.dispatchCea608Packet(packet);
11717 } else {
11718 // Dispatch packet to the Cea708Stream
11719 this.dispatchCea708Packet(packet);
11720 }
11721 }, this);
11722 this.captionPackets_.length = 0;
11723 this.flushCCStreams(flushType);
11724 };
11725
11726 CaptionStream$1.prototype.flush = function () {
11727 return this.flushStream('flush');
11728 }; // Only called if handling partial data
11729
11730
11731 CaptionStream$1.prototype.partialFlush = function () {
11732 return this.flushStream('partialFlush');
11733 };
11734
11735 CaptionStream$1.prototype.reset = function () {
11736 this.latestDts_ = null;
11737 this.ignoreNextEqualDts_ = false;
11738 this.numSameDts_ = 0;
11739 this.activeCea608Channel_ = [null, null];
11740 this.ccStreams_.forEach(function (ccStream) {
11741 ccStream.reset();
11742 });
11743 }; // From the CEA-608 spec:
11744
11745 /*
11746 * When XDS sub-packets are interleaved with other services, the end of each sub-packet shall be followed
11747 * by a control pair to change to a different service. When any of the control codes from 0x10 to 0x1F is
11748 * used to begin a control code pair, it indicates the return to captioning or Text data. The control code pair
11749 * and subsequent data should then be processed according to the FCC rules. It may be necessary for the
11750 * line 21 data encoder to automatically insert a control code pair (i.e. RCL, RU2, RU3, RU4, RDC, or RTD)
11751 * to switch to captioning or Text.
11752 */
11753 // With that in mind, we ignore any data between an XDS control code and a
11754 // subsequent closed-captioning control code.
11755
11756
11757 CaptionStream$1.prototype.dispatchCea608Packet = function (packet) {
11758 // NOTE: packet.type is the CEA608 field
11759 if (this.setsTextOrXDSActive(packet)) {
11760 this.activeCea608Channel_[packet.type] = null;
11761 } else if (this.setsChannel1Active(packet)) {
11762 this.activeCea608Channel_[packet.type] = 0;
11763 } else if (this.setsChannel2Active(packet)) {
11764 this.activeCea608Channel_[packet.type] = 1;
11765 }
11766
11767 if (this.activeCea608Channel_[packet.type] === null) {
11768 // If we haven't received anything to set the active channel, or the
11769 // packets are Text/XDS data, discard the data; we don't want jumbled
11770 // captions
11771 return;
11772 }
11773
11774 this.ccStreams_[(packet.type << 1) + this.activeCea608Channel_[packet.type]].push(packet);
11775 };
11776
11777 CaptionStream$1.prototype.setsChannel1Active = function (packet) {
11778 return (packet.ccData & 0x7800) === 0x1000;
11779 };
11780
11781 CaptionStream$1.prototype.setsChannel2Active = function (packet) {
11782 return (packet.ccData & 0x7800) === 0x1800;
11783 };
11784
11785 CaptionStream$1.prototype.setsTextOrXDSActive = function (packet) {
11786 return (packet.ccData & 0x7100) === 0x0100 || (packet.ccData & 0x78fe) === 0x102a || (packet.ccData & 0x78fe) === 0x182a;
11787 };
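  /*
   * Worked example (illustrative only): the RCL (resume caption loading)
   * control pair for data channel 1 is 0x1420, and (0x1420 & 0x7800) ===
   * 0x1000, so setsChannel1Active() matches it; the channel-2 variant 0x1C20
   * satisfies (0x1C20 & 0x7800) === 0x1800 instead.
   */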
11788
11789 CaptionStream$1.prototype.dispatchCea708Packet = function (packet) {
11790 if (this.parse708captions_) {
11791 this.cc708Stream_.push(packet);
11792 }
11793 }; // ----------------------
11794 // Session to Application
11795 // ----------------------
11796 // This hash maps special and extended character codes to their
11797 // proper Unicode equivalent. The first one-byte key is just a
11798 // non-standard character code. The two-byte keys that follow are
11799 // the extended CEA708 character codes, along with the preceding
11800 // 0x10 extended character byte to distinguish these codes from
11801 // non-extended character codes. Every CEA708 character code that
11802 // is not in this object maps directly to a standard unicode
11803 // character code.
11804 // The transparent space and non-breaking transparent space are
11805 // technically not fully supported since there is no code to
11806 // make them transparent, so they have normal non-transparent
11807 // stand-ins.
11808 // The special closed caption (CC) character isn't a standard
11809 // unicode character, so a fairly similar unicode character was
 11810 // chosen in its place.
11811
11812
11813 var CHARACTER_TRANSLATION_708 = {
11814 0x7f: 0x266a,
11815 // ♪
11816 0x1020: 0x20,
11817 // Transparent Space
11818 0x1021: 0xa0,
 11819 // Non-breaking Transparent Space
11820 0x1025: 0x2026,
11821 // …
11822 0x102a: 0x0160,
11823 // Š
11824 0x102c: 0x0152,
11825 // Œ
11826 0x1030: 0x2588,
11827 // █
11828 0x1031: 0x2018,
11829 // ‘
11830 0x1032: 0x2019,
11831 // ’
11832 0x1033: 0x201c,
11833 // “
11834 0x1034: 0x201d,
11835 // ”
11836 0x1035: 0x2022,
11837 // •
11838 0x1039: 0x2122,
11839 // ™
11840 0x103a: 0x0161,
11841 // š
11842 0x103c: 0x0153,
11843 // œ
11844 0x103d: 0x2120,
11845 // ℠
11846 0x103f: 0x0178,
11847 // Ÿ
11848 0x1076: 0x215b,
11849 // ⅛
11850 0x1077: 0x215c,
11851 // ⅜
11852 0x1078: 0x215d,
11853 // ⅝
11854 0x1079: 0x215e,
11855 // ⅞
11856 0x107a: 0x23d0,
11857 // ⏐
11858 0x107b: 0x23a4,
11859 // ⎤
11860 0x107c: 0x23a3,
11861 // ⎣
11862 0x107d: 0x23af,
11863 // ⎯
11864 0x107e: 0x23a6,
11865 // ⎦
11866 0x107f: 0x23a1,
11867 // ⎡
11868 0x10a0: 0x3138 // ㄸ (CC char)
11869
11870 };
11871
11872 var get708CharFromCode = function get708CharFromCode(code) {
11873 var newCode = CHARACTER_TRANSLATION_708[code] || code;
11874
11875 if (code & 0x1000 && code === newCode) {
11876 // Invalid extended code
11877 return '';
11878 }
11879
11880 return String.fromCharCode(newCode);
11881 };
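  /*
   * Illustrative examples (not part of the original bundle):
   *
   *   get708CharFromCode(0x1025); // '…' via the translation table above
   *   get708CharFromCode(0x41);   // 'A' maps directly to its unicode value
   *   get708CharFromCode(0x1003); // '' for an unmapped extended code
   */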
11882
11883 var within708TextBlock = function within708TextBlock(b) {
11884 return 0x20 <= b && b <= 0x7f || 0xa0 <= b && b <= 0xff;
11885 };
11886
11887 var Cea708Window = function Cea708Window(windowNum) {
11888 this.windowNum = windowNum;
11889 this.reset();
11890 };
11891
11892 Cea708Window.prototype.reset = function () {
11893 this.clearText();
11894 this.pendingNewLine = false;
11895 this.winAttr = {};
11896 this.penAttr = {};
11897 this.penLoc = {};
11898 this.penColor = {}; // These default values are arbitrary,
11899 // defineWindow will usually override them
11900
11901 this.visible = 0;
11902 this.rowLock = 0;
11903 this.columnLock = 0;
11904 this.priority = 0;
11905 this.relativePositioning = 0;
11906 this.anchorVertical = 0;
11907 this.anchorHorizontal = 0;
11908 this.anchorPoint = 0;
11909 this.rowCount = 1;
11910 this.virtualRowCount = this.rowCount + 1;
11911 this.columnCount = 41;
11912 this.windowStyle = 0;
11913 this.penStyle = 0;
11914 };
11915
11916 Cea708Window.prototype.getText = function () {
11917 return this.rows.join('\n');
11918 };
11919
11920 Cea708Window.prototype.clearText = function () {
11921 this.rows = [''];
11922 this.rowIdx = 0;
11923 };
11924
11925 Cea708Window.prototype.newLine = function (pts) {
11926 if (this.rows.length >= this.virtualRowCount && typeof this.beforeRowOverflow === 'function') {
11927 this.beforeRowOverflow(pts);
11928 }
11929
11930 if (this.rows.length > 0) {
11931 this.rows.push('');
11932 this.rowIdx++;
11933 } // Show all virtual rows since there's no visible scrolling
11934
11935
11936 while (this.rows.length > this.virtualRowCount) {
11937 this.rows.shift();
11938 this.rowIdx--;
11939 }
11940 };
11941
11942 Cea708Window.prototype.isEmpty = function () {
11943 if (this.rows.length === 0) {
11944 return true;
11945 } else if (this.rows.length === 1) {
11946 return this.rows[0] === '';
11947 }
11948
11949 return false;
11950 };
11951
11952 Cea708Window.prototype.addText = function (text) {
11953 this.rows[this.rowIdx] += text;
11954 };
11955
11956 Cea708Window.prototype.backspace = function () {
11957 if (!this.isEmpty()) {
11958 var row = this.rows[this.rowIdx];
11959 this.rows[this.rowIdx] = row.substr(0, row.length - 1);
11960 }
11961 };
11962
11963 var Cea708Service = function Cea708Service(serviceNum, encoding, stream) {
11964 this.serviceNum = serviceNum;
11965 this.text = '';
11966 this.currentWindow = new Cea708Window(-1);
11967 this.windows = [];
 11968 this.stream = stream; // Try to set up a TextDecoder if an `encoding` value was provided
11969
11970 if (typeof encoding === 'string') {
11971 this.createTextDecoder(encoding);
11972 }
11973 };
11974 /**
11975 * Initialize service windows
11976 * Must be run before service use
11977 *
11978 * @param {Integer} pts PTS value
11979 * @param {Function} beforeRowOverflow Function to execute before row overflow of a window
11980 */
11981
11982
11983 Cea708Service.prototype.init = function (pts, beforeRowOverflow) {
11984 this.startPts = pts;
11985
11986 for (var win = 0; win < 8; win++) {
11987 this.windows[win] = new Cea708Window(win);
11988
11989 if (typeof beforeRowOverflow === 'function') {
11990 this.windows[win].beforeRowOverflow = beforeRowOverflow;
11991 }
11992 }
11993 };
11994 /**
11995 * Set current window of service to be affected by commands
11996 *
11997 * @param {Integer} windowNum Window number
11998 */
11999
12000
12001 Cea708Service.prototype.setCurrentWindow = function (windowNum) {
12002 this.currentWindow = this.windows[windowNum];
12003 };
12004 /**
12005 * Try to create a TextDecoder if it is natively supported
12006 */
12007
12008
12009 Cea708Service.prototype.createTextDecoder = function (encoding) {
12010 if (typeof TextDecoder === 'undefined') {
12011 this.stream.trigger('log', {
12012 level: 'warn',
12013 message: 'The `encoding` option is unsupported without TextDecoder support'
12014 });
12015 } else {
12016 try {
12017 this.textDecoder_ = new TextDecoder(encoding);
12018 } catch (error) {
12019 this.stream.trigger('log', {
12020 level: 'warn',
12021 message: 'TextDecoder could not be created with ' + encoding + ' encoding. ' + error
12022 });
12023 }
12024 }
12025 };
12026
12027 var Cea708Stream = function Cea708Stream(options) {
12028 options = options || {};
12029 Cea708Stream.prototype.init.call(this);
12030 var self = this;
12031 var captionServices = options.captionServices || {};
12032 var captionServiceEncodings = {};
12033 var serviceProps; // Get service encodings from captionServices option block
12034
12035 Object.keys(captionServices).forEach(function (serviceName) {
12036 serviceProps = captionServices[serviceName];
12037
12038 if (/^SERVICE/.test(serviceName)) {
12039 captionServiceEncodings[serviceName] = serviceProps.encoding;
12040 }
12041 });
12042 this.serviceEncodings = captionServiceEncodings;
12043 this.current708Packet = null;
12044 this.services = {};
12045
12046 this.push = function (packet) {
12047 if (packet.type === 3) {
12048 // 708 packet start
12049 self.new708Packet();
12050 self.add708Bytes(packet);
12051 } else {
12052 if (self.current708Packet === null) {
12053 // This should only happen at the start of a file if there's no packet start.
12054 self.new708Packet();
12055 }
12056
12057 self.add708Bytes(packet);
12058 }
12059 };
12060 };
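// Usage sketch (illustrative, not part of the original bundle): the
// captionServices option is keyed by service name; only keys matching
// /^SERVICE/ contribute an encoding, which is later handed to TextDecoder
// in createTextDecoder:
//
//   var cea708 = new Cea708Stream({
//     captionServices: {
//       SERVICE1: { encoding: 'euc-kr' } // multi-byte text decoded natively
//     }
//   });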
12061
12062 Cea708Stream.prototype = new stream();
12063 /**
12064 * Push current 708 packet, create new 708 packet.
12065 */
12066
12067 Cea708Stream.prototype.new708Packet = function () {
12068 if (this.current708Packet !== null) {
12069 this.push708Packet();
12070 }
12071
12072 this.current708Packet = {
12073 data: [],
12074 ptsVals: []
12075 };
12076 };
12077 /**
12078 * Add pts and both bytes from packet into current 708 packet.
12079 */
12080
12081
12082 Cea708Stream.prototype.add708Bytes = function (packet) {
12083 var data = packet.ccData;
12084 var byte0 = data >>> 8;
12085 var byte1 = data & 0xff; // I would just keep a list of packets instead of bytes, but it isn't clear in the spec
12086 // that service blocks will always line up with byte pairs.
12087
12088 this.current708Packet.ptsVals.push(packet.pts);
12089 this.current708Packet.data.push(byte0);
12090 this.current708Packet.data.push(byte1);
12091 };
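// Worked example: each ccData value is one 16-bit byte pair, so a packet with
// ccData === 0x1234 contributes byte0 = 0x12 and byte1 = 0x34, and one PTS
// value is stored per two bytes (see getPts below).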
12092 /**
12093 * Parse completed 708 packet into service blocks and push each service block.
12094 */
12095
12096
12097 Cea708Stream.prototype.push708Packet = function () {
12098 var packet708 = this.current708Packet;
12099 var packetData = packet708.data;
12100 var serviceNum = null;
12101 var blockSize = null;
12102 var i = 0;
12103 var b = packetData[i++];
12104 packet708.seq = b >> 6;
12105 packet708.sizeCode = b & 0x3f; // 0b00111111;
12106
12107 for (; i < packetData.length; i++) {
12108 b = packetData[i++];
12109 serviceNum = b >> 5;
12110 blockSize = b & 0x1f; // 0b00011111
12111
12112 if (serviceNum === 7 && blockSize > 0) {
12113 // Extended service num
12114 b = packetData[i++];
12115 serviceNum = b;
12116 }
12117
12118 this.pushServiceBlock(serviceNum, i, blockSize);
12119
12120 if (blockSize > 0) {
12121 i += blockSize - 1;
12122 }
12123 }
12124 };
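// Worked example of the headers parsed above (illustrative): a packet header
// byte 0x42 (0b01000010) yields seq = 0x42 >> 6 = 1 and
// sizeCode = 0x42 & 0x3f = 2; a service block header byte 0x23 (0b00100011)
// yields serviceNum = 0x23 >> 5 = 1 and blockSize = 0x23 & 0x1f = 3. A
// serviceNum of 7 with a non-zero blockSize means the real service number
// follows in the next byte.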
12125 /**
12126 * Parse service block, execute commands, read text.
12127 *
12128 * Note: While many of these commands serve important purposes,
12129 * many others just parse out the parameters or attributes, but
12130 * nothing is done with them because this is not a full and complete
12131 * implementation of the entire 708 spec.
12132 *
12133 * @param {Integer} serviceNum Service number
12134 * @param {Integer} start Start index of the 708 packet data
12135 * @param {Integer} size Block size
12136 */
12137
12138
12139 Cea708Stream.prototype.pushServiceBlock = function (serviceNum, start, size) {
12140 var b;
12141 var i = start;
12142 var packetData = this.current708Packet.data;
12143 var service = this.services[serviceNum];
12144
12145 if (!service) {
12146 service = this.initService(serviceNum, i);
12147 }
12148
12149 for (; i < start + size && i < packetData.length; i++) {
12150 b = packetData[i];
12151
12152 if (within708TextBlock(b)) {
12153 i = this.handleText(i, service);
12154 } else if (b === 0x18) {
12155 i = this.multiByteCharacter(i, service);
12156 } else if (b === 0x10) {
12157 i = this.extendedCommands(i, service);
12158 } else if (0x80 <= b && b <= 0x87) {
12159 i = this.setCurrentWindow(i, service);
12160 } else if (0x98 <= b && b <= 0x9f) {
12161 i = this.defineWindow(i, service);
12162 } else if (b === 0x88) {
12163 i = this.clearWindows(i, service);
12164 } else if (b === 0x8c) {
12165 i = this.deleteWindows(i, service);
12166 } else if (b === 0x89) {
12167 i = this.displayWindows(i, service);
12168 } else if (b === 0x8a) {
12169 i = this.hideWindows(i, service);
12170 } else if (b === 0x8b) {
12171 i = this.toggleWindows(i, service);
12172 } else if (b === 0x97) {
12173 i = this.setWindowAttributes(i, service);
12174 } else if (b === 0x90) {
12175 i = this.setPenAttributes(i, service);
12176 } else if (b === 0x91) {
12177 i = this.setPenColor(i, service);
12178 } else if (b === 0x92) {
12179 i = this.setPenLocation(i, service);
12180 } else if (b === 0x8f) {
12181 service = this.reset(i, service);
12182 } else if (b === 0x08) {
12183 // BS: Backspace
12184 service.currentWindow.backspace();
12185 } else if (b === 0x0c) {
12186 // FF: Form feed
12187 service.currentWindow.clearText();
12188 } else if (b === 0x0d) {
12189 // CR: Carriage return
12190 service.currentWindow.pendingNewLine = true;
12191 } else if (b === 0x0e) {
12192 // HCR: Horizontal carriage return
12193 service.currentWindow.clearText();
12194 } else if (b === 0x8d) {
12195 // DLY: Delay, nothing to do
12196 i++;
12197 } else ; // unknown or unsupported command: ignored
12198 }
12199 };
12200 /**
12201 * Execute an extended command
12202 *
12203 * @param {Integer} i Current index in the 708 packet
12204 * @param {Service} service The service object to be affected
12205 * @return {Integer} New index after parsing
12206 */
12207
12208
12209 Cea708Stream.prototype.extendedCommands = function (i, service) {
12210 var packetData = this.current708Packet.data;
12211 var b = packetData[++i];
12212
12213 if (within708TextBlock(b)) {
12214 i = this.handleText(i, service, {
12215 isExtended: true
12216 });
12217 }
12218
12219 return i;
12220 };
12221 /**
12222 * Get PTS value of a given byte index
12223 *
12224 * @param {Integer} byteIndex Index of the byte
12225 * @return {Integer} PTS
12226 */
12227
12228
12229 Cea708Stream.prototype.getPts = function (byteIndex) {
12230 // There's 1 pts value per 2 bytes
12231 return this.current708Packet.ptsVals[Math.floor(byteIndex / 2)];
12232 };
12233 /**
12234 * Initializes a service
12235 *
12236 * @param {Integer} serviceNum Service number
12237 * @return {Service} Initialized service object
12238 */
12239
12240
12241 Cea708Stream.prototype.initService = function (serviceNum, i) {
12242 var serviceName = 'SERVICE' + serviceNum;
12243 var self = this;
12245 var encoding;
12246
12247 if (serviceName in this.serviceEncodings) {
12248 encoding = this.serviceEncodings[serviceName];
12249 }
12250
12251 this.services[serviceNum] = new Cea708Service(serviceNum, encoding, self);
12252 this.services[serviceNum].init(this.getPts(i), function (pts) {
12253 self.flushDisplayed(pts, self.services[serviceNum]);
12254 });
12255 return this.services[serviceNum];
12256 };
12257 /**
12258 * Execute text writing to current window
12259 *
12260 * @param {Integer} i Current index in the 708 packet
12261 * @param {Service} service The service object to be affected
12262 * @return {Integer} New index after parsing
12263 */
12264
12265
12266 Cea708Stream.prototype.handleText = function (i, service, options) {
12267 var isExtended = options && options.isExtended;
12268 var isMultiByte = options && options.isMultiByte;
12269 var packetData = this.current708Packet.data;
12270 var extended = isExtended ? 0x1000 : 0x0000;
12271 var currentByte = packetData[i];
12272 var nextByte = packetData[i + 1];
12273 var win = service.currentWindow;
12274 var char;
12275 var charCodeArray; // Use the TextDecoder if one was created for this service
12276
12277 if (service.textDecoder_ && !isExtended) {
12278 if (isMultiByte) {
12279 charCodeArray = [currentByte, nextByte];
12280 i++;
12281 } else {
12282 charCodeArray = [currentByte];
12283 }
12284
12285 char = service.textDecoder_.decode(new Uint8Array(charCodeArray));
12286 } else {
12287 char = get708CharFromCode(extended | currentByte);
12288 }
12289
12290 if (win.pendingNewLine && !win.isEmpty()) {
12291 win.newLine(this.getPts(i));
12292 }
12293
12294 win.pendingNewLine = false;
12295 win.addText(char);
12296 return i;
12297 };
12298 /**
12299 * Handle decoding of multibyte character
12300 *
12301 * @param {Integer} i Current index in the 708 packet
12302 * @param {Service} service The service object to be affected
12303 * @return {Integer} New index after parsing
12304 */
12305
12306
12307 Cea708Stream.prototype.multiByteCharacter = function (i, service) {
12308 var packetData = this.current708Packet.data;
12309 var firstByte = packetData[i + 1];
12310 var secondByte = packetData[i + 2];
12311
12312 if (within708TextBlock(firstByte) && within708TextBlock(secondByte)) {
12313 i = this.handleText(++i, service, {
12314 isMultiByte: true
12315 });
12316 }
12317
12318 return i;
12319 };
12320 /**
12321 * Parse and execute the CW# command.
12322 *
12323 * Set the current window.
12324 *
12325 * @param {Integer} i Current index in the 708 packet
12326 * @param {Service} service The service object to be affected
12327 * @return {Integer} New index after parsing
12328 */
12329
12330
12331 Cea708Stream.prototype.setCurrentWindow = function (i, service) {
12332 var packetData = this.current708Packet.data;
12333 var b = packetData[i];
12334 var windowNum = b & 0x07;
12335 service.setCurrentWindow(windowNum);
12336 return i;
12337 };
12338 /**
12339 * Parse and execute the DF# command.
12340 *
12341 * Define a window and set it as the current window.
12342 *
12343 * @param {Integer} i Current index in the 708 packet
12344 * @param {Service} service The service object to be affected
12345 * @return {Integer} New index after parsing
12346 */
12347
12348
12349 Cea708Stream.prototype.defineWindow = function (i, service) {
12350 var packetData = this.current708Packet.data;
12351 var b = packetData[i];
12352 var windowNum = b & 0x07;
12353 service.setCurrentWindow(windowNum);
12354 var win = service.currentWindow;
12355 b = packetData[++i];
12356 win.visible = (b & 0x20) >> 5; // v
12357
12358 win.rowLock = (b & 0x10) >> 4; // rl
12359
12360 win.columnLock = (b & 0x08) >> 3; // cl
12361
12362 win.priority = b & 0x07; // p
12363
12364 b = packetData[++i];
12365 win.relativePositioning = (b & 0x80) >> 7; // rp
12366
12367 win.anchorVertical = b & 0x7f; // av
12368
12369 b = packetData[++i];
12370 win.anchorHorizontal = b; // ah
12371
12372 b = packetData[++i];
12373 win.anchorPoint = (b & 0xf0) >> 4; // ap
12374
12375 win.rowCount = b & 0x0f; // rc
12376
12377 b = packetData[++i];
12378 win.columnCount = b & 0x3f; // cc
12379
12380 b = packetData[++i];
12381 win.windowStyle = (b & 0x38) >> 3; // ws
12382
12383 win.penStyle = b & 0x07; // ps
12384 // The spec says there are (rowCount+1) "virtual rows"
12385
12386 win.virtualRowCount = win.rowCount + 1;
12387 return i;
12388 };
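// Worked example of the first attribute byte unpacked above (illustrative):
// b = 0x2a (0b00101010) gives visible = 1, rowLock = 0, columnLock = 1 and
// priority = 2; the remaining bytes unpack the anchor position, row/column
// counts and window/pen styles the same way.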
12389 /**
12390 * Parse and execute the SWA command.
12391 *
12392 * Set attributes of the current window.
12393 *
12394 * @param {Integer} i Current index in the 708 packet
12395 * @param {Service} service The service object to be affected
12396 * @return {Integer} New index after parsing
12397 */
12398
12399
12400 Cea708Stream.prototype.setWindowAttributes = function (i, service) {
12401 var packetData = this.current708Packet.data;
12402 var b = packetData[i];
12403 var winAttr = service.currentWindow.winAttr;
12404 b = packetData[++i];
12405 winAttr.fillOpacity = (b & 0xc0) >> 6; // fo
12406
12407 winAttr.fillRed = (b & 0x30) >> 4; // fr
12408
12409 winAttr.fillGreen = (b & 0x0c) >> 2; // fg
12410
12411 winAttr.fillBlue = b & 0x03; // fb
12412
12413 b = packetData[++i];
12414 winAttr.borderType = (b & 0xc0) >> 6; // bt
12415
12416 winAttr.borderRed = (b & 0x30) >> 4; // br
12417
12418 winAttr.borderGreen = (b & 0x0c) >> 2; // bg
12419
12420 winAttr.borderBlue = b & 0x03; // bb
12421
12422 b = packetData[++i];
12423 winAttr.borderType += (b & 0x80) >> 5; // bt
12424
12425 winAttr.wordWrap = (b & 0x40) >> 6; // ww
12426
12427 winAttr.printDirection = (b & 0x30) >> 4; // pd
12428
12429 winAttr.scrollDirection = (b & 0x0c) >> 2; // sd
12430
12431 winAttr.justify = b & 0x03; // j
12432
12433 b = packetData[++i];
12434 winAttr.effectSpeed = (b & 0xf0) >> 4; // es
12435
12436 winAttr.effectDirection = (b & 0x0c) >> 2; // ed
12437
12438 winAttr.displayEffect = b & 0x03; // de
12439
12440 return i;
12441 };
12442 /**
12443 * Gather text from all displayed windows and push a caption to output.
12444 *
12445 * @param {Integer} pts PTS at which the displayed windows are flushed
12446 * @param {Service} service The service object to be affected
12447 */
12448
12449
12450 Cea708Stream.prototype.flushDisplayed = function (pts, service) {
12451 var displayedText = []; // TODO: Positioning not supported, displaying multiple windows will not necessarily
12452 // display text in the correct order, but sample files so far have not shown any issue.
12453
12454 for (var winId = 0; winId < 8; winId++) {
12455 if (service.windows[winId].visible && !service.windows[winId].isEmpty()) {
12456 displayedText.push(service.windows[winId].getText());
12457 }
12458 }
12459
12460 service.endPts = pts;
12461 service.text = displayedText.join('\n\n');
12462 this.pushCaption(service);
12463 service.startPts = pts;
12464 };
12465 /**
12466 * Push a caption to output if the caption contains text.
12467 *
12468 * @param {Service} service The service object to be affected
12469 */
12470
12471
12472 Cea708Stream.prototype.pushCaption = function (service) {
12473 if (service.text !== '') {
12474 this.trigger('data', {
12475 startPts: service.startPts,
12476 endPts: service.endPts,
12477 text: service.text,
12478 stream: 'cc708_' + service.serviceNum
12479 });
12480 service.text = '';
12481 service.startPts = service.endPts;
12482 }
12483 };
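// Shape of the cue emitted above (field names taken from the trigger call;
// the PTS numbers are sample values), e.g. for service 1:
//
//   { startPts: 900000, endPts: 990000, text: 'caption text', stream: 'cc708_1' }
//
// startPts/endPts are the raw 90kHz timestamps carried through from the packets.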
12484 /**
12485 * Parse and execute the DSW command.
12486 *
12487 * Set visible property of windows based on the parsed bitmask.
12488 *
12489 * @param {Integer} i Current index in the 708 packet
12490 * @param {Service} service The service object to be affected
12491 * @return {Integer} New index after parsing
12492 */
12493
12494
12495 Cea708Stream.prototype.displayWindows = function (i, service) {
12496 var packetData = this.current708Packet.data;
12497 var b = packetData[++i];
12498 var pts = this.getPts(i);
12499 this.flushDisplayed(pts, service);
12500
12501 for (var winId = 0; winId < 8; winId++) {
12502 if (b & 0x01 << winId) {
12503 service.windows[winId].visible = 1;
12504 }
12505 }
12506
12507 return i;
12508 };
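// Worked example of the window bitmask (illustrative): DSW, HDW, TGW, CLW and
// DLW below all read one mask byte where bit n addresses window n, so
// b = 0x05 (0b00000101) applies the command to windows 0 and 2 only.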
12509 /**
12510 * Parse and execute the HDW command.
12511 *
12512 * Set visible property of windows based on the parsed bitmask.
12513 *
12514 * @param {Integer} i Current index in the 708 packet
12515 * @param {Service} service The service object to be affected
12516 * @return {Integer} New index after parsing
12517 */
12518
12519
12520 Cea708Stream.prototype.hideWindows = function (i, service) {
12521 var packetData = this.current708Packet.data;
12522 var b = packetData[++i];
12523 var pts = this.getPts(i);
12524 this.flushDisplayed(pts, service);
12525
12526 for (var winId = 0; winId < 8; winId++) {
12527 if (b & 0x01 << winId) {
12528 service.windows[winId].visible = 0;
12529 }
12530 }
12531
12532 return i;
12533 };
12534 /**
12535 * Parse and execute the TGW command.
12536 *
12537 * Set visible property of windows based on the parsed bitmask.
12538 *
12539 * @param {Integer} i Current index in the 708 packet
12540 * @param {Service} service The service object to be affected
12541 * @return {Integer} New index after parsing
12542 */
12543
12544
12545 Cea708Stream.prototype.toggleWindows = function (i, service) {
12546 var packetData = this.current708Packet.data;
12547 var b = packetData[++i];
12548 var pts = this.getPts(i);
12549 this.flushDisplayed(pts, service);
12550
12551 for (var winId = 0; winId < 8; winId++) {
12552 if (b & 0x01 << winId) {
12553 service.windows[winId].visible ^= 1;
12554 }
12555 }
12556
12557 return i;
12558 };
12559 /**
12560 * Parse and execute the CLW command.
12561 *
12562 * Clear text of windows based on the parsed bitmask.
12563 *
12564 * @param {Integer} i Current index in the 708 packet
12565 * @param {Service} service The service object to be affected
12566 * @return {Integer} New index after parsing
12567 */
12568
12569
12570 Cea708Stream.prototype.clearWindows = function (i, service) {
12571 var packetData = this.current708Packet.data;
12572 var b = packetData[++i];
12573 var pts = this.getPts(i);
12574 this.flushDisplayed(pts, service);
12575
12576 for (var winId = 0; winId < 8; winId++) {
12577 if (b & 0x01 << winId) {
12578 service.windows[winId].clearText();
12579 }
12580 }
12581
12582 return i;
12583 };
12584 /**
12585 * Parse and execute the DLW command.
12586 *
12587 * Re-initialize windows based on the parsed bitmask.
12588 *
12589 * @param {Integer} i Current index in the 708 packet
12590 * @param {Service} service The service object to be affected
12591 * @return {Integer} New index after parsing
12592 */
12593
12594
12595 Cea708Stream.prototype.deleteWindows = function (i, service) {
12596 var packetData = this.current708Packet.data;
12597 var b = packetData[++i];
12598 var pts = this.getPts(i);
12599 this.flushDisplayed(pts, service);
12600
12601 for (var winId = 0; winId < 8; winId++) {
12602 if (b & 0x01 << winId) {
12603 service.windows[winId].reset();
12604 }
12605 }
12606
12607 return i;
12608 };
12609 /**
12610 * Parse and execute the SPA command.
12611 *
12612 * Set pen attributes of the current window.
12613 *
12614 * @param {Integer} i Current index in the 708 packet
12615 * @param {Service} service The service object to be affected
12616 * @return {Integer} New index after parsing
12617 */
12618
12619
12620 Cea708Stream.prototype.setPenAttributes = function (i, service) {
12621 var packetData = this.current708Packet.data;
12622 var b = packetData[i];
12623 var penAttr = service.currentWindow.penAttr;
12624 b = packetData[++i];
12625 penAttr.textTag = (b & 0xf0) >> 4; // tt
12626
12627 penAttr.offset = (b & 0x0c) >> 2; // o
12628
12629 penAttr.penSize = b & 0x03; // s
12630
12631 b = packetData[++i];
12632 penAttr.italics = (b & 0x80) >> 7; // i
12633
12634 penAttr.underline = (b & 0x40) >> 6; // u
12635
12636 penAttr.edgeType = (b & 0x38) >> 3; // et
12637
12638 penAttr.fontStyle = b & 0x07; // fs
12639
12640 return i;
12641 };
12642 /**
12643 * Parse and execute the SPC command.
12644 *
12645 * Set pen color of the current window.
12646 *
12647 * @param {Integer} i Current index in the 708 packet
12648 * @param {Service} service The service object to be affected
12649 * @return {Integer} New index after parsing
12650 */
12651
12652
12653 Cea708Stream.prototype.setPenColor = function (i, service) {
12654 var packetData = this.current708Packet.data;
12655 var b = packetData[i];
12656 var penColor = service.currentWindow.penColor;
12657 b = packetData[++i];
12658 penColor.fgOpacity = (b & 0xc0) >> 6; // fo
12659
12660 penColor.fgRed = (b & 0x30) >> 4; // fr
12661
12662 penColor.fgGreen = (b & 0x0c) >> 2; // fg
12663
12664 penColor.fgBlue = b & 0x03; // fb
12665
12666 b = packetData[++i];
12667 penColor.bgOpacity = (b & 0xc0) >> 6; // bo
12668
12669 penColor.bgRed = (b & 0x30) >> 4; // br
12670
12671 penColor.bgGreen = (b & 0x0c) >> 2; // bg
12672
12673 penColor.bgBlue = b & 0x03; // bb
12674
12675 b = packetData[++i];
12676 penColor.edgeRed = (b & 0x30) >> 4; // er
12677
12678 penColor.edgeGreen = (b & 0x0c) >> 2; // eg
12679
12680 penColor.edgeBlue = b & 0x03; // eb
12681
12682 return i;
12683 };
12684 /**
12685 * Parse and execute the SPL command.
12686 *
12687 * Set pen location of the current window.
12688 *
12689 * @param {Integer} i Current index in the 708 packet
12690 * @param {Service} service The service object to be affected
12691 * @return {Integer} New index after parsing
12692 */
12693
12694
12695 Cea708Stream.prototype.setPenLocation = function (i, service) {
12696 var packetData = this.current708Packet.data;
12697 var b = packetData[i];
12698 var penLoc = service.currentWindow.penLoc; // Positioning isn't really supported at the moment, so this essentially just inserts a linebreak
12699
12700 service.currentWindow.pendingNewLine = true;
12701 b = packetData[++i];
12702 penLoc.row = b & 0x0f; // r
12703
12704 b = packetData[++i];
12705 penLoc.column = b & 0x3f; // c
12706
12707 return i;
12708 };
12709 /**
12710 * Execute the RST command.
12711 *
12712 * Reset service to a clean slate. Re-initialize.
12713 *
12714 * @param {Integer} i Current index in the 708 packet
12715 * @param {Service} service The service object to be affected
12716 * @return {Service} Re-initialized service
12717 */
12718
12719
12720 Cea708Stream.prototype.reset = function (i, service) {
12721 var pts = this.getPts(i);
12722 this.flushDisplayed(pts, service);
12723 return this.initService(service.serviceNum, i);
12724 }; // This hash maps non-ASCII, special, and extended character codes to their
12725 // proper Unicode equivalent. The first keys that are only a single byte
12726 // are the non-standard ASCII characters, which simply map the CEA608 byte
12727 // to the standard ASCII/Unicode. The two-byte keys that follow are the CEA608
12728 // character codes, but have their MSB bitmasked with 0x03 so that a lookup
12729 // can be performed regardless of the field and data channel on which the
12730 // character code was received.
12731
12732
12733 var CHARACTER_TRANSLATION = {
12734 0x2a: 0xe1,
12735 // á
12736 0x5c: 0xe9,
12737 // é
12738 0x5e: 0xed,
12739 // í
12740 0x5f: 0xf3,
12741 // ó
12742 0x60: 0xfa,
12743 // ú
12744 0x7b: 0xe7,
12745 // ç
12746 0x7c: 0xf7,
12747 // ÷
12748 0x7d: 0xd1,
12749 // Ñ
12750 0x7e: 0xf1,
12751 // ñ
12752 0x7f: 0x2588,
12753 // █
12754 0x0130: 0xae,
12755 // ®
12756 0x0131: 0xb0,
12757 // °
12758 0x0132: 0xbd,
12759 // ½
12760 0x0133: 0xbf,
12761 // ¿
12762 0x0134: 0x2122,
12763 // ™
12764 0x0135: 0xa2,
12765 // ¢
12766 0x0136: 0xa3,
12767 // £
12768 0x0137: 0x266a,
12769 // ♪
12770 0x0138: 0xe0,
12771 // à
12772 0x0139: 0xa0,
12773 // (non-breaking space)
12774 0x013a: 0xe8,
12775 // è
12776 0x013b: 0xe2,
12777 // â
12778 0x013c: 0xea,
12779 // ê
12780 0x013d: 0xee,
12781 // î
12782 0x013e: 0xf4,
12783 // ô
12784 0x013f: 0xfb,
12785 // û
12786 0x0220: 0xc1,
12787 // Á
12788 0x0221: 0xc9,
12789 // É
12790 0x0222: 0xd3,
12791 // Ó
12792 0x0223: 0xda,
12793 // Ú
12794 0x0224: 0xdc,
12795 // Ü
12796 0x0225: 0xfc,
12797 // ü
12798 0x0226: 0x2018,
12799 // ‘
12800 0x0227: 0xa1,
12801 // ¡
12802 0x0228: 0x2a,
12803 // *
12804 0x0229: 0x27,
12805 // '
12806 0x022a: 0x2014,
12807 // —
12808 0x022b: 0xa9,
12809 // ©
12810 0x022c: 0x2120,
12811 // ℠
12812 0x022d: 0x2022,
12813 // •
12814 0x022e: 0x201c,
12815 // “
12816 0x022f: 0x201d,
12817 // ”
12818 0x0230: 0xc0,
12819 // À
12820 0x0231: 0xc2,
12821 // Â
12822 0x0232: 0xc7,
12823 // Ç
12824 0x0233: 0xc8,
12825 // È
12826 0x0234: 0xca,
12827 // Ê
12828 0x0235: 0xcb,
12829 // Ë
12830 0x0236: 0xeb,
12831 // ë
12832 0x0237: 0xce,
12833 // Î
12834 0x0238: 0xcf,
12835 // Ï
12836 0x0239: 0xef,
12837 // ï
12838 0x023a: 0xd4,
12839 // Ô
12840 0x023b: 0xd9,
12841 // Ù
12842 0x023c: 0xf9,
12843 // ù
12844 0x023d: 0xdb,
12845 // Û
12846 0x023e: 0xab,
12847 // «
12848 0x023f: 0xbb,
12849 // »
12850 0x0320: 0xc3,
12851 // Ã
12852 0x0321: 0xe3,
12853 // ã
12854 0x0322: 0xcd,
12855 // Í
12856 0x0323: 0xcc,
12857 // Ì
12858 0x0324: 0xec,
12859 // ì
12860 0x0325: 0xd2,
12861 // Ò
12862 0x0326: 0xf2,
12863 // ò
12864 0x0327: 0xd5,
12865 // Õ
12866 0x0328: 0xf5,
12867 // õ
12868 0x0329: 0x7b,
12869 // {
12870 0x032a: 0x7d,
12871 // }
12872 0x032b: 0x5c,
12873 // \
12874 0x032c: 0x5e,
12875 // ^
12876 0x032d: 0x5f,
12877 // _
12878 0x032e: 0x7c,
12879 // |
12880 0x032f: 0x7e,
12881 // ~
12882 0x0330: 0xc4,
12883 // Ä
12884 0x0331: 0xe4,
12885 // ä
12886 0x0332: 0xd6,
12887 // Ö
12888 0x0333: 0xf6,
12889 // ö
12890 0x0334: 0xdf,
12891 // ß
12892 0x0335: 0xa5,
12893 // ¥
12894 0x0336: 0xa4,
12895 // ¤
12896 0x0337: 0x2502,
12897 // │
12898 0x0338: 0xc5,
12899 // Å
12900 0x0339: 0xe5,
12901 // å
12902 0x033a: 0xd8,
12903 // Ø
12904 0x033b: 0xf8,
12905 // ø
12906 0x033c: 0x250c,
12907 // ┌
12908 0x033d: 0x2510,
12909 // ┐
12910 0x033e: 0x2514,
12911 // └
12912 0x033f: 0x2518 // ┘
12913
12914 };
12915
12916 var getCharFromCode = function getCharFromCode(code) {
12917 if (code === null) {
12918 return '';
12919 }
12920
12921 code = CHARACTER_TRANSLATION[code] || code;
12922 return String.fromCharCode(code);
12923 }; // the index of the last row in a CEA-608 display buffer
12924
12925
12926 var BOTTOM_ROW = 14; // This array is used for mapping PACs -> row #, since there's no way of
12927 // getting it through bit logic.
12928
12929 var ROWS = [0x1100, 0x1120, 0x1200, 0x1220, 0x1500, 0x1520, 0x1600, 0x1620, 0x1700, 0x1720, 0x1000, 0x1300, 0x1320, 0x1400, 0x1420]; // CEA-608 captions are rendered onto a 34x15 matrix of character
12930 // cells. The "bottom" row is the last element in the outer array.
12931
12932 var createDisplayBuffer = function createDisplayBuffer() {
12933 var result = [],
12934 i = BOTTOM_ROW + 1;
12935
12936 while (i--) {
12937 result.push('');
12938 }
12939
12940 return result;
12941 };
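// Worked examples (values visible in CHARACTER_TRANSLATION above):
//
//   getCharFromCode(0x2a);         // 'á' -- 0x2a remaps to 0xe1
//   getCharFromCode(0x41);         // 'A' -- unmapped codes pass through
//   createDisplayBuffer().length;  // 15  -- BOTTOM_ROW + 1 empty rows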
12942
12943 var Cea608Stream = function Cea608Stream(field, dataChannel) {
12944 Cea608Stream.prototype.init.call(this);
12945 this.field_ = field || 0;
12946 this.dataChannel_ = dataChannel || 0;
12947 this.name_ = 'CC' + ((this.field_ << 1 | this.dataChannel_) + 1);
12948 this.setConstants();
12949 this.reset();
12950
12951 this.push = function (packet) {
12952 var data, swap, char0, char1, text; // remove the parity bits
12953
12954 data = packet.ccData & 0x7f7f; // ignore duplicate control codes; the spec demands they're sent twice
12955
12956 if (data === this.lastControlCode_) {
12957 this.lastControlCode_ = null;
12958 return;
12959 } // Store control codes
12960
12961
12962 if ((data & 0xf000) === 0x1000) {
12963 this.lastControlCode_ = data;
12964 } else if (data !== this.PADDING_) {
12965 this.lastControlCode_ = null;
12966 }
12967
12968 char0 = data >>> 8;
12969 char1 = data & 0xff;
12970
12971 if (data === this.PADDING_) {
12972 return;
12973 } else if (data === this.RESUME_CAPTION_LOADING_) {
12974 this.mode_ = 'popOn';
12975 } else if (data === this.END_OF_CAPTION_) {
12976 // If an EOC is received while in paint-on mode, the displayed caption
12977 // text should be swapped to non-displayed memory as if it was a pop-on
12978 // caption. Because of that, we should explicitly switch back to pop-on
12979 // mode
12980 this.mode_ = 'popOn';
12981 this.clearFormatting(packet.pts); // if a caption was being displayed, it's gone now
12982
12983 this.flushDisplayed(packet.pts); // flip memory
12984
12985 swap = this.displayed_;
12986 this.displayed_ = this.nonDisplayed_;
12987 this.nonDisplayed_ = swap; // start measuring the time to display the caption
12988
12989 this.startPts_ = packet.pts;
12990 } else if (data === this.ROLL_UP_2_ROWS_) {
12991 this.rollUpRows_ = 2;
12992 this.setRollUp(packet.pts);
12993 } else if (data === this.ROLL_UP_3_ROWS_) {
12994 this.rollUpRows_ = 3;
12995 this.setRollUp(packet.pts);
12996 } else if (data === this.ROLL_UP_4_ROWS_) {
12997 this.rollUpRows_ = 4;
12998 this.setRollUp(packet.pts);
12999 } else if (data === this.CARRIAGE_RETURN_) {
13000 this.clearFormatting(packet.pts);
13001 this.flushDisplayed(packet.pts);
13002 this.shiftRowsUp_();
13003 this.startPts_ = packet.pts;
13004 } else if (data === this.BACKSPACE_) {
13005 if (this.mode_ === 'popOn') {
13006 this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
13007 } else {
13008 this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
13009 }
13010 } else if (data === this.ERASE_DISPLAYED_MEMORY_) {
13011 this.flushDisplayed(packet.pts);
13012 this.displayed_ = createDisplayBuffer();
13013 } else if (data === this.ERASE_NON_DISPLAYED_MEMORY_) {
13014 this.nonDisplayed_ = createDisplayBuffer();
13015 } else if (data === this.RESUME_DIRECT_CAPTIONING_) {
13016 if (this.mode_ !== 'paintOn') {
13017 // NOTE: This should be removed when proper caption positioning is
13018 // implemented
13019 this.flushDisplayed(packet.pts);
13020 this.displayed_ = createDisplayBuffer();
13021 }
13022
13023 this.mode_ = 'paintOn';
13024 this.startPts_ = packet.pts; // Append special characters to caption text
13025 } else if (this.isSpecialCharacter(char0, char1)) {
13026 // Bitmask char0 so that we can apply character transformations
13027 // regardless of field and data channel.
13028 // Then byte-shift to the left and OR with char1 so we can pass the
13029 // entire character code to `getCharFromCode`.
13030 char0 = (char0 & 0x03) << 8;
13031 text = getCharFromCode(char0 | char1);
13032 this[this.mode_](packet.pts, text);
13033 this.column_++; // Append extended characters to caption text
13034 } else if (this.isExtCharacter(char0, char1)) {
13035 // Extended characters always follow their "non-extended" equivalents.
13036 // I.e. if an "è" is desired, you'll always receive "eè"; non-compliant
13037 // decoders are supposed to drop the "è", while compliant decoders
13038 // backspace the "e" and insert "è".
13039 // Delete the previous character
13040 if (this.mode_ === 'popOn') {
13041 this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
13042 } else {
13043 this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
13044 } // Bitmask char0 so that we can apply character transformations
13045 // regardless of field and data channel.
13046 // Then byte-shift to the left and OR with char1 so we can pass the
13047 // entire character code to `getCharFromCode`.
13048
13049
13050 char0 = (char0 & 0x03) << 8;
13051 text = getCharFromCode(char0 | char1);
13052 this[this.mode_](packet.pts, text);
13053 this.column_++; // Process mid-row codes
13054 } else if (this.isMidRowCode(char0, char1)) {
13055 // Attributes are not additive, so clear all formatting
13056 this.clearFormatting(packet.pts); // According to the standard, mid-row codes
13057 // should be replaced with spaces, so add one now
13058
13059 this[this.mode_](packet.pts, ' ');
13060 this.column_++;
13061
13062 if ((char1 & 0xe) === 0xe) {
13063 this.addFormatting(packet.pts, ['i']);
13064 }
13065
13066 if ((char1 & 0x1) === 0x1) {
13067 this.addFormatting(packet.pts, ['u']);
13068 } // Detect offset control codes and adjust cursor
13069
13070 } else if (this.isOffsetControlCode(char0, char1)) {
13071 // Cursor position is set by indent PAC (see below) in 4-column
13072 // increments, with an additional offset code of 1-3 to reach any
13073 // of the 32 columns specified by CEA-608. So all we need to do
13074 // here is increment the column cursor by the given offset.
13075 this.column_ += char1 & 0x03; // Detect PACs (Preamble Address Codes)
13076 } else if (this.isPAC(char0, char1)) {
13077 // There's no logic for PAC -> row mapping, so we have to just
13078 // find the row code in an array and use its index :(
13079 var row = ROWS.indexOf(data & 0x1f20); // Configure the caption window if we're in roll-up mode
13080
13081 if (this.mode_ === 'rollUp') {
13082 // This implies that the base row is incorrectly set.
13083 // As per the recommendation in CEA-608 (Base Row Implementation), defer to the number
13084 // of roll-up rows set.
13085 if (row - this.rollUpRows_ + 1 < 0) {
13086 row = this.rollUpRows_ - 1;
13087 }
13088
13089 this.setRollUp(packet.pts, row);
13090 }
13091
13092 if (row !== this.row_) {
13093 // formatting is only persistent for current row
13094 this.clearFormatting(packet.pts);
13095 this.row_ = row;
13096 } // All PACs can apply underline, so detect and apply
13097 // (All odd-numbered second bytes set underline)
13098
13099
13100 if (char1 & 0x1 && this.formatting_.indexOf('u') === -1) {
13101 this.addFormatting(packet.pts, ['u']);
13102 }
13103
13104 if ((data & 0x10) === 0x10) {
13105 // We've got an indent level code. Each successive even number
13106 // increments the column cursor by 4, so we can get the desired
13107 // column position by bit-shifting to the right (to get n/2)
13108 // and multiplying by 4.
13109 this.column_ = ((data & 0xe) >> 1) * 4;
13110 }
13111
13112 if (this.isColorPAC(char1)) {
13113 // it's a color code, though we only support white, which
13114 // can be either normal or italicized. white italics can be
13115 // either 0x4e or 0x6e depending on the row, so we just
13116 // bitwise-and with 0xe to see if italics should be turned on
13117 if ((char1 & 0xe) === 0xe) {
13118 this.addFormatting(packet.pts, ['i']);
13119 }
13120 } // We have a normal character in char0, and possibly one in char1
13121
13122 } else if (this.isNormalChar(char0)) {
13123 if (char1 === 0x00) {
13124 char1 = null;
13125 }
13126
13127 text = getCharFromCode(char0);
13128 text += getCharFromCode(char1);
13129 this[this.mode_](packet.pts, text);
13130 this.column_ += text.length;
13131 } // finish data processing
13132
13133 };
13134 };
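// Worked example of the parity strip above (field_ = 0, dataChannel_ = 0,
// i.e. the CC1 stream): ccData 0x942f & 0x7f7f === 0x142f, which is
// CONTROL_ (0x1400) | 0x2f, the END_OF_CAPTION_ command. Because 608 control
// codes are transmitted twice, the lastControlCode_ check drops the second,
// identical copy.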
13135
13136 Cea608Stream.prototype = new stream(); // Trigger a cue point that captures the current state of the
13137 // display buffer
13138
13139 Cea608Stream.prototype.flushDisplayed = function (pts) {
13140 var content = this.displayed_ // remove spaces from the start and end of the string
13141 .map(function (row, index) {
13142 try {
13143 return row.trim();
13144 } catch (e) {
13145 // Ordinarily, this shouldn't happen. However, caption
13146 // parsing errors should not throw exceptions and
13147 // break playback.
13148 this.trigger('log', {
13149 level: 'warn',
13150 message: 'Skipping a malformed 608 caption at index ' + index + '.'
13151 });
13152 return '';
13153 }
13154 }, this) // combine all text rows to display in one cue
13155 .join('\n') // and remove blank rows from the start and end, but not the middle
13156 .replace(/^\n+|\n+$/g, '');
13157
13158 if (content.length) {
13159 this.trigger('data', {
13160 startPts: this.startPts_,
13161 endPts: pts,
13162 text: content,
13163 stream: this.name_
13164 });
13165 }
13166 };
13167 /**
13168 * Zero out the data, used for startup and on seek
13169 */
13170
13171
13172 Cea608Stream.prototype.reset = function () {
13173 this.mode_ = 'popOn'; // When in roll-up mode, the index of the last row that will
13174 // actually display captions. If a caption is shifted to a row
13175 // with a lower index than this, it is cleared from the display
13176 // buffer
13177
13178 this.topRow_ = 0;
13179 this.startPts_ = 0;
13180 this.displayed_ = createDisplayBuffer();
13181 this.nonDisplayed_ = createDisplayBuffer();
13182 this.lastControlCode_ = null; // Track row and column for proper line-breaking and spacing
13183
13184 this.column_ = 0;
13185 this.row_ = BOTTOM_ROW;
13186 this.rollUpRows_ = 2; // This variable holds currently-applied formatting
13187
13188 this.formatting_ = [];
13189 };
13190 /**
13191 * Sets up control code and related constants for this instance
13192 */
13193
13194
13195 Cea608Stream.prototype.setConstants = function () {
13196 // The following attributes have these uses:
13197 // ext_ : char0 for mid-row codes, and the base for extended
13198 // chars (ext_+0, ext_+1, and ext_+2 are char0s for
13199 // extended codes)
13200 // control_: char0 for control codes, except byte-shifted to the
13201 // left so that we can do this.control_ | CONTROL_CODE
13202 // offset_: char0 for tab offset codes
13203 //
13204 // It's also worth noting that control codes, and _only_ control codes,
13205 // differ between field 1 and field2. Field 2 control codes are always
13206 // their field 1 value plus 1. That's why there's the "| field" on the
13207 // control value.
13208 if (this.dataChannel_ === 0) {
13209 this.BASE_ = 0x10;
13210 this.EXT_ = 0x11;
13211 this.CONTROL_ = (0x14 | this.field_) << 8;
13212 this.OFFSET_ = 0x17;
13213 } else if (this.dataChannel_ === 1) {
13214 this.BASE_ = 0x18;
13215 this.EXT_ = 0x19;
13216 this.CONTROL_ = (0x1c | this.field_) << 8;
13217 this.OFFSET_ = 0x1f;
13218 } // Constants for the LSByte command codes recognized by Cea608Stream. This
13219 // list is not exhaustive. For a more comprehensive listing and semantics see
13220 // http://www.gpo.gov/fdsys/pkg/CFR-2010-title47-vol1/pdf/CFR-2010-title47-vol1-sec15-119.pdf
13221 // Padding
13222
13223
13224 this.PADDING_ = 0x0000; // Pop-on Mode
13225
13226 this.RESUME_CAPTION_LOADING_ = this.CONTROL_ | 0x20;
13227 this.END_OF_CAPTION_ = this.CONTROL_ | 0x2f; // Roll-up Mode
13228
13229 this.ROLL_UP_2_ROWS_ = this.CONTROL_ | 0x25;
13230 this.ROLL_UP_3_ROWS_ = this.CONTROL_ | 0x26;
13231 this.ROLL_UP_4_ROWS_ = this.CONTROL_ | 0x27;
13232 this.CARRIAGE_RETURN_ = this.CONTROL_ | 0x2d; // paint-on mode
13233
13234 this.RESUME_DIRECT_CAPTIONING_ = this.CONTROL_ | 0x29; // Erasure
13235
13236 this.BACKSPACE_ = this.CONTROL_ | 0x21;
13237 this.ERASE_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2c;
13238 this.ERASE_NON_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2e;
13239 };
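// Worked example (field_ = 0, dataChannel_ = 0, name_ = 'CC1'): CONTROL_ is
// (0x14 | 0) << 8 === 0x1400, so RESUME_CAPTION_LOADING_ === 0x1420 and
// END_OF_CAPTION_ === 0x142f. On field 2 (field_ = 1) every control char0 is
// one higher, hence the "| this.field_" above.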
13240 /**
13241 * Detects if the 2-byte packet data is a special character
13242 *
13243 * Special characters have a second byte in the range 0x30 to 0x3f,
13244 * with the first byte being 0x11 (for data channel 1) or 0x19 (for
13245 * data channel 2).
13246 *
13247 * @param {Integer} char0 The first byte
13248 * @param {Integer} char1 The second byte
13249 * @return {Boolean} Whether the 2 bytes are a special character
13250 */
13251
13252
13253 Cea608Stream.prototype.isSpecialCharacter = function (char0, char1) {
13254 return char0 === this.EXT_ && char1 >= 0x30 && char1 <= 0x3f;
13255 };
13256 /**
13257 * Detects if the 2-byte packet data is an extended character
13258 *
13259 * Extended characters have a second byte in the range 0x20 to 0x3f,
13260 * with the first byte being 0x12 or 0x13 (for data channel 1) or
13261 * 0x1a or 0x1b (for data channel 2).
13262 *
13263 * @param {Integer} char0 The first byte
13264 * @param {Integer} char1 The second byte
13265 * @return {Boolean} Whether the 2 bytes are an extended character
13266 */
13267
13268
13269 Cea608Stream.prototype.isExtCharacter = function (char0, char1) {
13270 return (char0 === this.EXT_ + 1 || char0 === this.EXT_ + 2) && char1 >= 0x20 && char1 <= 0x3f;
13271 };
13272 /**
13273 * Detects if the 2-byte packet is a mid-row code
13274 *
13275 * Mid-row codes have a second byte in the range 0x20 to 0x2f, with
13276 * the first byte being 0x11 (for data channel 1) or 0x19 (for data
13277 * channel 2).
13278 *
13279 * @param {Integer} char0 The first byte
13280 * @param {Integer} char1 The second byte
13281 * @return {Boolean} Whether the 2 bytes are a mid-row code
13282 */
13283
13284
13285 Cea608Stream.prototype.isMidRowCode = function (char0, char1) {
13286 return char0 === this.EXT_ && char1 >= 0x20 && char1 <= 0x2f;
13287 };
13288 /**
13289 * Detects if the 2-byte packet is an offset control code
13290 *
13291 * Offset control codes have a second byte in the range 0x21 to 0x23,
13292 * with the first byte being 0x17 (for data channel 1) or 0x1f (for
13293 * data channel 2).
13294 *
13295 * @param {Integer} char0 The first byte
13296 * @param {Integer} char1 The second byte
13297 * @return {Boolean} Whether the 2 bytes are an offset control code
13298 */
13299
13300
13301 Cea608Stream.prototype.isOffsetControlCode = function (char0, char1) {
13302 return char0 === this.OFFSET_ && char1 >= 0x21 && char1 <= 0x23;
13303 };
13304 /**
13305 * Detects if the 2-byte packet is a Preamble Address Code
13306 *
13307 * PACs have a first byte in the range 0x10 to 0x17 (for data channel 1)
13308 * or 0x18 to 0x1f (for data channel 2), with the second byte in the
13309 * range 0x40 to 0x7f.
13310 *
13311 * @param {Integer} char0 The first byte
13312 * @param {Integer} char1 The second byte
13313 * @return {Boolean} Whether the 2 bytes are a PAC
13314 */
13315
13316
13317 Cea608Stream.prototype.isPAC = function (char0, char1) {
13318 return char0 >= this.BASE_ && char0 < this.BASE_ + 8 && char1 >= 0x40 && char1 <= 0x7f;
13319 };
13320 /**
13321 * Detects if a packet's second byte is in the range of a PAC color code
13322 *
13323 * PAC color codes have a second byte in the range 0x40 to 0x4f, or
13324 * 0x60 to 0x7f.
13325 *
13326 * @param {Integer} char1 The second byte
13327 * @return {Boolean} Whether the byte is a color PAC
13328 */
13329
13330
13331 Cea608Stream.prototype.isColorPAC = function (char1) {
13332 return char1 >= 0x40 && char1 <= 0x4f || char1 >= 0x60 && char1 <= 0x7f;
13333 };
13334 /**
13335 * Detects if a single byte is in the range of a normal character
13336 *
13337 * Normal text bytes are in the range 0x20 to 0x7f.
13338 *
13339 * @param {Integer} char The byte
13340 * @return {Boolean} Whether the byte is a normal character
13341 */
13342
13343
13344 Cea608Stream.prototype.isNormalChar = function (char) {
13345 return char >= 0x20 && char <= 0x7f;
13346 };
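// Examples against the CC1 constants above (illustrative; EXT_ = 0x11,
// BASE_ = 0x10, OFFSET_ = 0x17):
//
//   var cc1 = new Cea608Stream(0, 0);
//   cc1.isSpecialCharacter(0x11, 0x30);  // true -- special character set
//   cc1.isExtCharacter(0x12, 0x20);      // true -- extended Latin set
//   cc1.isOffsetControlCode(0x17, 0x21); // true -- tab offset of 1 column
//   cc1.isPAC(0x10, 0x40);               // true -- preamble address code
//   cc1.isNormalChar(0x41);              // true -- plain 'A'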
13347 /**
13348 * Configures roll-up
13349 *
13350 * @param {Integer} pts Current PTS
13351 * @param {Integer} newBaseRow Used by PACs to slide the current window to
13352 * a new position
13353 */
13354
13355
13356 Cea608Stream.prototype.setRollUp = function (pts, newBaseRow) {
13357 // Reset the base row to the bottom row when switching modes
13358 if (this.mode_ !== 'rollUp') {
13359 this.row_ = BOTTOM_ROW;
13360 this.mode_ = 'rollUp'; // Spec says to wipe memories when switching to roll-up
13361
13362 this.flushDisplayed(pts);
13363 this.nonDisplayed_ = createDisplayBuffer();
13364 this.displayed_ = createDisplayBuffer();
13365 }
13366
13367 if (newBaseRow !== undefined && newBaseRow !== this.row_) {
13368 // move currently displayed captions (up or down) to the new base row
13369 for (var i = 0; i < this.rollUpRows_; i++) {
13370 this.displayed_[newBaseRow - i] = this.displayed_[this.row_ - i];
13371 this.displayed_[this.row_ - i] = '';
13372 }
13373 }
13374
13375 if (newBaseRow === undefined) {
13376 newBaseRow = this.row_;
13377 }
13378
13379 this.topRow_ = newBaseRow - this.rollUpRows_ + 1;
13380 }; // Adds the opening HTML tag for the passed character to the caption text,
13381 // and keeps track of it for later closing
13382
13383
13384 Cea608Stream.prototype.addFormatting = function (pts, format) {
13385 this.formatting_ = this.formatting_.concat(format);
13386 var text = format.reduce(function (text, format) {
13387 return text + '<' + format + '>';
13388 }, '');
13389 this[this.mode_](pts, text);
13390 }; // Adds HTML closing tags for current formatting to caption text and
13391 // clears remembered formatting
13392
13393
13394 Cea608Stream.prototype.clearFormatting = function (pts) {
13395 if (!this.formatting_.length) {
13396 return;
13397 }
13398
13399 var text = this.formatting_.reverse().reduce(function (text, format) {
13400 return text + '</' + format + '>';
13401 }, '');
13402 this.formatting_ = [];
13403 this[this.mode_](pts, text);
13404 }; // Mode Implementations
13405
13406
13407 Cea608Stream.prototype.popOn = function (pts, text) {
13408 var baseRow = this.nonDisplayed_[this.row_]; // buffer characters
13409
13410 baseRow += text;
13411 this.nonDisplayed_[this.row_] = baseRow;
13412 };
13413
13414 Cea608Stream.prototype.rollUp = function (pts, text) {
13415 var baseRow = this.displayed_[this.row_];
13416 baseRow += text;
13417 this.displayed_[this.row_] = baseRow;
13418 };
13419
13420 Cea608Stream.prototype.shiftRowsUp_ = function () {
13421 var i; // clear out inactive rows
13422
13423 for (i = 0; i < this.topRow_; i++) {
13424 this.displayed_[i] = '';
13425 }
13426
13427 for (i = this.row_ + 1; i < BOTTOM_ROW + 1; i++) {
13428 this.displayed_[i] = '';
13429 } // shift displayed rows up
13430
13431
13432 for (i = this.topRow_; i < this.row_; i++) {
13433 this.displayed_[i] = this.displayed_[i + 1];
13434 } // clear out the bottom row
13435
13436
13437 this.displayed_[this.row_] = '';
13438 };
13439
13440 Cea608Stream.prototype.paintOn = function (pts, text) {
13441 var baseRow = this.displayed_[this.row_];
13442 baseRow += text;
13443 this.displayed_[this.row_] = baseRow;
13444 }; // exports
13445
13446
13447 var captionStream = {
13448 CaptionStream: CaptionStream$1,
13449 Cea608Stream: Cea608Stream,
13450 Cea708Stream: Cea708Stream
13451 };
13452 /**
13453 * mux.js
13454 *
13455 * Copyright (c) Brightcove
13456 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
13457 */
13458
13459 var streamTypes = {
13460 H264_STREAM_TYPE: 0x1B,
13461 ADTS_STREAM_TYPE: 0x0F,
13462 METADATA_STREAM_TYPE: 0x15
13463 };
13464 var MAX_TS = 8589934592; // 2^33, the full span of a 33-bit 90kHz MPEG-2 timestamp
13465 var RO_THRESH = 4294967296; // 2^32, half the timestamp span
13466 var TYPE_SHARED = 'shared';
13467
13468 var handleRollover$1 = function handleRollover(value, reference) {
13469 var direction = 1;
13470
13471 if (value > reference) {
13472 // If the current timestamp value is greater than our reference timestamp and we detect a
13473 // timestamp rollover, this means the roll over is happening in the opposite direction.
13474 // Example scenario: Enter a long stream/video just after a rollover occurred. The reference
13475 // point will be set to a small number, e.g. 1. The user then seeks backwards over the
13476 // rollover point. In loading this segment, the timestamp values will be very large,
13477 // e.g. 2^33 - 1. Since this comes before the data we loaded previously, we want to adjust
13478 // the time stamp to be `value - 2^33`.
13479 direction = -1;
13480 } // Note: A seek forwards or back that is greater than the RO_THRESH (2^32, ~13 hours) will
13481 // cause an incorrect adjustment.
13482
13483
13484 while (Math.abs(reference - value) > RO_THRESH) {
13485 value += direction * MAX_TS;
13486 }
13487
13488 return value;
13489 };
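// Worked example (illustrative): with reference = 10 and value = 2^33 - 5
// (just before the 33-bit timestamp wraps), value > reference selects the
// backwards direction, a single MAX_TS subtraction lands within RO_THRESH,
// and handleRollover(8589934587, 10) returns -5: five 90kHz ticks before the
// reference point.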
13490
13491 var TimestampRolloverStream$1 = function TimestampRolloverStream(type) {
13492 var lastDTS, referenceDTS;
13493 TimestampRolloverStream.prototype.init.call(this); // The "shared" type is used in cases where a stream will contain muxed
13494 // video and audio. We could use `undefined` here, but having a string
13495 // makes debugging a little clearer.
13496
13497 this.type_ = type || TYPE_SHARED;
13498
13499 this.push = function (data) {
13500 // Any "shared" rollover streams will accept _all_ data. Otherwise,
13501 // streams will only accept data that matches their type.
13502 if (this.type_ !== TYPE_SHARED && data.type !== this.type_) {
13503 return;
13504 }
13505
13506 if (referenceDTS === undefined) {
13507 referenceDTS = data.dts;
13508 }
13509
13510 data.dts = handleRollover$1(data.dts, referenceDTS);
13511 data.pts = handleRollover$1(data.pts, referenceDTS);
13512 lastDTS = data.dts;
13513 this.trigger('data', data);
13514 };
13515
13516 this.flush = function () {
13517 referenceDTS = lastDTS;
13518 this.trigger('done');
13519 };
13520
13521 this.endTimeline = function () {
13522 this.flush();
13523 this.trigger('endedtimeline');
13524 };
13525
13526 this.discontinuity = function () {
13527 referenceDTS = void 0;
13528 lastDTS = void 0;
13529 };
13530
13531 this.reset = function () {
13532 this.discontinuity();
13533 this.trigger('reset');
13534 };
13535 };
13536
13537 TimestampRolloverStream$1.prototype = new stream();
13538 var timestampRolloverStream = {
13539 TimestampRolloverStream: TimestampRolloverStream$1,
13540 handleRollover: handleRollover$1
13541 };
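// Usage sketch (illustrative; assumes the Stream on/trigger API used above):
//
//   var rollover = new TimestampRolloverStream$1('video');
//   rollover.on('data', function (data) {
//     // data.dts and data.pts arrive here rollover-adjusted
//   });
//   rollover.push({ type: 'video', dts: 8589934587, pts: 8589934587 });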
13542
13543 var percentEncode$1 = function percentEncode(bytes, start, end) {
13544 var i,
13545 result = '';
13546
13547 for (i = start; i < end; i++) {
13548 result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
13549 }
13550
13551 return result;
13552 },
13553 // return the string representation of the specified byte range,
13554 // interpreted as UTF-8.
13555 parseUtf8 = function parseUtf8(bytes, start, end) {
13556 return decodeURIComponent(percentEncode$1(bytes, start, end));
13557 },
13558 // return the string representation of the specified byte range,
13559 // interpreted as ISO-8859-1.
13560 parseIso88591$1 = function parseIso88591(bytes, start, end) {
13561 return unescape(percentEncode$1(bytes, start, end)); // jshint ignore:line
13562 },
13563 parseSyncSafeInteger$1 = function parseSyncSafeInteger(data) {
13564 return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
13565 },
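// Worked example: ID3 syncsafe integers drop the MSB of each byte, so the
// bytes [0x00, 0x00, 0x02, 0x01] decode as (0 << 21) | (0 << 14) | (2 << 7) | 1
// === 257, not the 513 a plain 32-bit read would give.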
13566 tagParsers = {
13567 TXXX: function TXXX(tag) {
13568 var i;
13569
13570 if (tag.data[0] !== 3) {
13571 // ignore frames with unrecognized character encodings
13572 return;
13573 }
13574
13575 for (i = 1; i < tag.data.length; i++) {
13576 if (tag.data[i] === 0) {
13577 // parse the text fields
13578 tag.description = parseUtf8(tag.data, 1, i); // do not include the null terminator in the tag value
13579
13580 tag.value = parseUtf8(tag.data, i + 1, tag.data.length).replace(/\0*$/, '');
13581 break;
13582 }
13583 }
13584
13585 tag.data = tag.value;
13586 },
13587 WXXX: function WXXX(tag) {
13588 var i;
13589
13590 if (tag.data[0] !== 3) {
13591 // ignore frames with unrecognized character encodings
13592 return;
13593 }
13594
13595 for (i = 1; i < tag.data.length; i++) {
13596 if (tag.data[i] === 0) {
13597 // parse the description and URL fields
13598 tag.description = parseUtf8(tag.data, 1, i);
13599 tag.url = parseUtf8(tag.data, i + 1, tag.data.length);
13600 break;
13601 }
13602 }
13603 },
13604 PRIV: function PRIV(tag) {
13605 var i;
13606
13607 for (i = 0; i < tag.data.length; i++) {
13608 if (tag.data[i] === 0) {
13609 // parse the owner field, up to the null terminator
13610 tag.owner = parseIso88591$1(tag.data, 0, i);
13611 break;
13612 }
13613 }
13614
13615 tag.privateData = tag.data.subarray(i + 1);
13616 tag.data = tag.privateData;
13617 }
13618 },
13619 _MetadataStream;
13620
13621 _MetadataStream = function MetadataStream(options) {
13622 var settings = {
13623 // the bytes of the program-level descriptor field in MP2T
13624 // see ISO/IEC 13818-1:2013 (E), section 2.6 "Program and
13625 // program element descriptors"
13626 descriptor: options && options.descriptor
13627 },
13628 // the total size in bytes of the ID3 tag being parsed
13629 tagSize = 0,
13630 // tag data that is not complete enough to be parsed
13631 buffer = [],
13632 // the total number of bytes currently in the buffer
13633 bufferSize = 0,
13634 i;
13635
13636 _MetadataStream.prototype.init.call(this); // calculate the text track in-band metadata track dispatch type
13637 // https://html.spec.whatwg.org/multipage/embedded-content.html#steps-to-expose-a-media-resource-specific-text-track
13638
13639
13640 this.dispatchType = streamTypes.METADATA_STREAM_TYPE.toString(16);
13641
13642 if (settings.descriptor) {
13643 for (i = 0; i < settings.descriptor.length; i++) {
13644 this.dispatchType += ('00' + settings.descriptor[i].toString(16)).slice(-2);
13645 }
13646 }
13647
13648 this.push = function (chunk) {
13649 var tag, frameStart, frameSize, frame, i, frameHeader;
13650
13651 if (chunk.type !== 'timed-metadata') {
13652 return;
13653 } // if data_alignment_indicator is set in the PES header,
13654 // we must have the start of a new ID3 tag. Assume anything
13655 // remaining in the buffer was malformed and throw it out
13656
13657
13658 if (chunk.dataAlignmentIndicator) {
13659 bufferSize = 0;
13660 buffer.length = 0;
13661 } // ignore events that don't look like ID3 data
13662
13663
13664 if (buffer.length === 0 && (chunk.data.length < 10 || chunk.data[0] !== 'I'.charCodeAt(0) || chunk.data[1] !== 'D'.charCodeAt(0) || chunk.data[2] !== '3'.charCodeAt(0))) {
13665 this.trigger('log', {
13666 level: 'warn',
13667 message: 'Skipping unrecognized metadata packet'
13668 });
13669 return;
13670 } // add this chunk to the data we've collected so far
13671
13672
13673 buffer.push(chunk);
13674 bufferSize += chunk.data.byteLength; // grab the size of the entire frame from the ID3 header
13675
13676 if (buffer.length === 1) {
13677 // the frame size is transmitted as a 28-bit integer in the
13678 // last four bytes of the ID3 header.
13679 // The most significant bit of each byte is dropped and the
13680 // results concatenated to recover the actual value.
13681 tagSize = parseSyncSafeInteger$1(chunk.data.subarray(6, 10)); // ID3 reports the tag size excluding the header but it's more
13682 // convenient for our comparisons to include it
13683
13684 tagSize += 10;
13685 } // if the entire frame has not arrived, wait for more data
13686
13687
13688 if (bufferSize < tagSize) {
13689 return;
13690 } // collect the entire frame so it can be parsed
13691
13692
13693 tag = {
13694 data: new Uint8Array(tagSize),
13695 frames: [],
13696 pts: buffer[0].pts,
13697 dts: buffer[0].dts
13698 };
13699
13700 for (i = 0; i < tagSize;) {
13701 tag.data.set(buffer[0].data.subarray(0, tagSize - i), i);
13702 i += buffer[0].data.byteLength;
13703 bufferSize -= buffer[0].data.byteLength;
13704 buffer.shift();
13705 } // find the start of the first frame and the end of the tag
13706
13707
13708 frameStart = 10;
13709
13710 if (tag.data[5] & 0x40) {
13711 // advance the frame start past the extended header
13712 frameStart += 4; // header size field
13713
13714 frameStart += parseSyncSafeInteger$1(tag.data.subarray(10, 14)); // clip any padding off the end
13715
13716 tagSize -= parseSyncSafeInteger$1(tag.data.subarray(16, 20));
13717 } // parse one or more ID3 frames
13718 // http://id3.org/id3v2.3.0#ID3v2_frame_overview
13719
13720
13721 do {
13722 // determine the number of bytes in this frame
13723 frameSize = parseSyncSafeInteger$1(tag.data.subarray(frameStart + 4, frameStart + 8));
13724
13725 if (frameSize < 1) {
13726 this.trigger('log', {
13727 level: 'warn',
13728 message: 'Malformed ID3 frame encountered. Skipping metadata parsing.'
13729 });
13730 return;
13731 }
13732
13733 frameHeader = String.fromCharCode(tag.data[frameStart], tag.data[frameStart + 1], tag.data[frameStart + 2], tag.data[frameStart + 3]);
13734 frame = {
13735 id: frameHeader,
13736 data: tag.data.subarray(frameStart + 10, frameStart + frameSize + 10)
13737 };
13738 frame.key = frame.id;
13739
13740 if (tagParsers[frame.id]) {
13741 tagParsers[frame.id](frame); // handle the special PRIV frame used to indicate the start
13742 // time for raw AAC data
13743
13744 if (frame.owner === 'com.apple.streaming.transportStreamTimestamp') {
13745 var d = frame.data,
13746 size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
13747 size *= 4;
13748 size += d[7] & 0x03;
13749 frame.timeStamp = size; // in raw AAC, all subsequent data will be timestamped based
13750 // on the value of this frame
13751 // we couldn't have known the appropriate pts and dts before
13752 // parsing this ID3 tag so set those values now
13753
13754 if (tag.pts === undefined && tag.dts === undefined) {
13755 tag.pts = frame.timeStamp;
13756 tag.dts = frame.timeStamp;
13757 }
13758
13759 this.trigger('timestamp', frame);
13760 }
13761 }
13762
13763 tag.frames.push(frame);
13764 frameStart += 10; // advance past the frame header
13765
13766 frameStart += frameSize; // advance past the frame body
13767 } while (frameStart < tagSize);
13768
13769 this.trigger('data', tag);
13770 };
13771 };
13772
13773 _MetadataStream.prototype = new stream();
13774 var metadataStream = _MetadataStream;
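// Example of the dispatchType computed above (illustrative): with no
// descriptor it is just '15' (METADATA_STREAM_TYPE in hex); a descriptor of
// [0x49, 0x44] appends each byte as zero-padded hex, giving '154944'.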
13775 var TimestampRolloverStream = timestampRolloverStream.TimestampRolloverStream; // object types
13776
13777 var _TransportPacketStream, _TransportParseStream, _ElementaryStream; // constants
13778
13779
13780 var MP2T_PACKET_LENGTH$1 = 188,
13781 // bytes
13782 SYNC_BYTE$1 = 0x47;
13783 /**
13784 * Splits an incoming stream of binary data into MPEG-2 Transport
13785 * Stream packets.
13786 */
13787
13788 _TransportPacketStream = function TransportPacketStream() {
13789 var buffer = new Uint8Array(MP2T_PACKET_LENGTH$1),
13790 bytesInBuffer = 0;
13791
13792 _TransportPacketStream.prototype.init.call(this); // Deliver new bytes to the stream.
13793
13794 /**
13795 * Split a stream of data into M2TS packets
13796 **/
13797
13798
13799 this.push = function (bytes) {
13800 var startIndex = 0,
13801 endIndex = MP2T_PACKET_LENGTH$1,
13802 everything; // If there are bytes remaining from the last segment, prepend them to the
13803 // bytes that were pushed in
13804
13805 if (bytesInBuffer) {
13806 everything = new Uint8Array(bytes.byteLength + bytesInBuffer);
13807 everything.set(buffer.subarray(0, bytesInBuffer));
13808 everything.set(bytes, bytesInBuffer);
13809 bytesInBuffer = 0;
13810 } else {
13811 everything = bytes;
13812 } // While we have enough data for a packet
13813
13814
13815 while (endIndex < everything.byteLength) {
13816 // Look for a pair of start and end sync bytes in the data.

13817 if (everything[startIndex] === SYNC_BYTE$1 && everything[endIndex] === SYNC_BYTE$1) {
13818 // We found a packet so emit it and jump one whole packet forward in
13819 // the stream
13820 this.trigger('data', everything.subarray(startIndex, endIndex));
13821 startIndex += MP2T_PACKET_LENGTH$1;
13822 endIndex += MP2T_PACKET_LENGTH$1;
13823 continue;
13824 } // If we get here, we have somehow become de-synchronized and we need to step
13825 // forward one byte at a time until we find a pair of sync bytes that denote
13826 // a packet
13827
13828
13829 startIndex++;
13830 endIndex++;
13831 } // If there was some data left over at the end of the segment that couldn't
13832 // possibly be a whole packet, keep it because it might be the start of a packet
13833 // that continues in the next segment
13834
13835
13836 if (startIndex < everything.byteLength) {
13837 buffer.set(everything.subarray(startIndex), 0);
13838 bytesInBuffer = everything.byteLength - startIndex;
13839 }
13840 };
13841 /**
13842 * Passes identified M2TS packets to the TransportParseStream to be parsed
13843 **/
13844
13845
13846 this.flush = function () {
13847 // If the buffer contains a whole packet when we are being flushed, emit it
13848 // and empty the buffer. Otherwise hold onto the data because it may be
13849 // important for decoding the next segment
13850 if (bytesInBuffer === MP2T_PACKET_LENGTH$1 && buffer[0] === SYNC_BYTE$1) {
13851 this.trigger('data', buffer);
13852 bytesInBuffer = 0;
13853 }
13854
13855 this.trigger('done');
13856 };
13857
13858 this.endTimeline = function () {
13859 this.flush();
13860 this.trigger('endedtimeline');
13861 };
13862
13863 this.reset = function () {
13864 bytesInBuffer = 0;
13865 this.trigger('reset');
13866 };
13867 };
13868
13869 _TransportPacketStream.prototype = new stream();
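  // Editor's sketch (assumed name, not part of mux.js): the resync loop above
  // amounts to sliding a 188-byte window until both edges land on 0x47.
  var exampleFindSyncOffset = function (bytes, packetLength) {
    var start = 0;
    while (start + packetLength < bytes.byteLength) {
      if (bytes[start] === 0x47 && bytes[start + packetLength] === 0x47) {
        return start; // a plausible packet boundary
      }
      start++; // de-synchronized; step forward one byte and retry
    }
    return -1; // not enough data to confirm a boundary yet
  };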
13870 /**
13871 * Accepts an MP2T TransportPacketStream and emits data events with parsed
13872 * forms of the individual transport stream packets.
13873 */
13874
13875 _TransportParseStream = function TransportParseStream() {
13876 var parsePsi, parsePat, parsePmt, self;
13877
13878 _TransportParseStream.prototype.init.call(this);
13879
13880 self = this;
13881 this.packetsWaitingForPmt = [];
13882 this.programMapTable = undefined;
13883
13884 parsePsi = function parsePsi(payload, psi) {
13885 var offset = 0; // PSI packets may be split into multiple sections and those
13886 // sections may be split into multiple packets. If a PSI
13887 // section starts in this packet, the payload_unit_start_indicator
13888 // will be true and the first byte of the payload will indicate
13889 // the offset from the current position to the start of the
13890 // section.
13891
13892 if (psi.payloadUnitStartIndicator) {
13893 offset += payload[offset] + 1;
13894 }
13895
13896 if (psi.type === 'pat') {
13897 parsePat(payload.subarray(offset), psi);
13898 } else {
13899 parsePmt(payload.subarray(offset), psi);
13900 }
13901 };
13902
13903 parsePat = function parsePat(payload, pat) {
13904 pat.section_number = payload[7]; // eslint-disable-line camelcase
13905
13906 pat.last_section_number = payload[8]; // eslint-disable-line camelcase
13907 // skip the PSI header and parse the first PMT entry
13908
13909 self.pmtPid = (payload[10] & 0x1F) << 8 | payload[11];
13910 pat.pmtPid = self.pmtPid;
13911 };
13912 /**
13913 * Parse out the relevant fields of a Program Map Table (PMT).
13914 * @param payload {Uint8Array} the PMT-specific portion of an MP2T
13915 * packet. The first byte in this array should be the table_id
13916 * field.
13917 * @param pmt {object} the object that should be decorated with
13918 * fields parsed from the PMT.
13919 */
13920
13921
13922 parsePmt = function parsePmt(payload, pmt) {
13923 var sectionLength, tableEnd, programInfoLength, offset; // PMTs can be sent ahead of the time when they should actually
13924 // take effect. We don't believe this should ever be the case
13925 // for HLS but we'll ignore "forward" PMT declarations if we see
13926 // them. Future PMT declarations have the current_next_indicator
13927 // set to zero.
13928
13929 if (!(payload[5] & 0x01)) {
13930 return;
13931 } // overwrite any existing program map table
13932
13933
13934 self.programMapTable = {
13935 video: null,
13936 audio: null,
13937 'timed-metadata': {}
13938 }; // the mapping table ends at the end of the current section
13939
13940 sectionLength = (payload[1] & 0x0f) << 8 | payload[2];
13941 tableEnd = 3 + sectionLength - 4; // to determine where the table is, we have to figure out how
13942 // long the program info descriptors are
13943
13944 programInfoLength = (payload[10] & 0x0f) << 8 | payload[11]; // advance the offset to the first entry in the mapping table
13945
13946 offset = 12 + programInfoLength;
13947
13948 while (offset < tableEnd) {
13949 var streamType = payload[offset];
13950 var pid = (payload[offset + 1] & 0x1F) << 8 | payload[offset + 2]; // only map a single elementary_pid for audio and video stream types
13951 // TODO: should this be done for metadata too? for now maintain behavior of
13952 // multiple metadata streams
13953
13954 if (streamType === streamTypes.H264_STREAM_TYPE && self.programMapTable.video === null) {
13955 self.programMapTable.video = pid;
13956 } else if (streamType === streamTypes.ADTS_STREAM_TYPE && self.programMapTable.audio === null) {
13957 self.programMapTable.audio = pid;
13958 } else if (streamType === streamTypes.METADATA_STREAM_TYPE) {
13959 // map pid to stream type for metadata streams
13960 self.programMapTable['timed-metadata'][pid] = streamType;
13961 } // move to the next table entry
13962 // skip past the elementary stream descriptors, if present
13963
13964
13965 offset += ((payload[offset + 3] & 0x0F) << 8 | payload[offset + 4]) + 5;
13966 } // record the map on the packet as well
13967
13968
13969 pmt.programMapTable = self.programMapTable;
13970 };
13971 /**
13972 * Deliver a new MP2T packet to the next stream in the pipeline.
13973 */
13974
13975
13976 this.push = function (packet) {
13977 var result = {},
13978 offset = 4;
13979 result.payloadUnitStartIndicator = !!(packet[1] & 0x40); // pid is a 13-bit field starting at the last bit of packet[1]
13980
13981 result.pid = packet[1] & 0x1f;
13982 result.pid <<= 8;
13983 result.pid |= packet[2]; // if an adaptation field is present, its length is specified by the
13984 // fifth byte of the TS packet header. The adaptation field is
13985 // used to add stuffing to PES packets that don't fill a complete
13986 // TS packet, and to specify some forms of timing and control data
13987 // that we do not currently use.
13988
13989 if ((packet[3] & 0x30) >>> 4 > 0x01) {
13990 offset += packet[offset] + 1;
13991 } // parse the rest of the packet based on the type
13992
13993
13994 if (result.pid === 0) {
13995 result.type = 'pat';
13996 parsePsi(packet.subarray(offset), result);
13997 this.trigger('data', result);
13998 } else if (result.pid === this.pmtPid) {
13999 result.type = 'pmt';
14000 parsePsi(packet.subarray(offset), result);
14001 this.trigger('data', result); // if there are any packets waiting for a PMT to be found, process them now
14002
14003 while (this.packetsWaitingForPmt.length) {
14004 this.processPes_.apply(this, this.packetsWaitingForPmt.shift());
14005 }
14006 } else if (this.programMapTable === undefined) {
14007 // When we have not seen a PMT yet, defer further processing of
14008 // PES packets until one has been parsed
14009 this.packetsWaitingForPmt.push([packet, offset, result]);
14010 } else {
14011 this.processPes_(packet, offset, result);
14012 }
14013 };
14014
14015 this.processPes_ = function (packet, offset, result) {
14016 // set the appropriate stream type
14017 if (result.pid === this.programMapTable.video) {
14018 result.streamType = streamTypes.H264_STREAM_TYPE;
14019 } else if (result.pid === this.programMapTable.audio) {
14020 result.streamType = streamTypes.ADTS_STREAM_TYPE;
14021 } else {
14022 // if not video or audio, it is timed-metadata or unknown
14023 // if unknown, streamType will be undefined
14024 result.streamType = this.programMapTable['timed-metadata'][result.pid];
14025 }
14026
14027 result.type = 'pes';
14028 result.data = packet.subarray(offset);
14029 this.trigger('data', result);
14030 };
14031 };
14032
14033 _TransportParseStream.prototype = new stream();
14034 _TransportParseStream.STREAM_TYPES = {
14035 h264: 0x1b,
14036 adts: 0x0f
14037 };
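  // Editor's sketch (assumed name, not part of mux.js): the 13-bit PID read
  // in push() spans the low 5 bits of header byte 1 plus all of byte 2.
  var examplePidOf = function (packet) {
    return (packet[1] & 0x1f) << 8 | packet[2];
  };
  // examplePidOf(new Uint8Array([0x47, 0x01, 0x00, 0x10])) === 0x100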
14038 /**
14039 * Reconstitutes program elementary stream (PES) packets from parsed
14040 * transport stream packets. That is, if you pipe an
14041 * mp2t.TransportParseStream into a mp2t.ElementaryStream, the output
14042 * events capture the bytes for individual PES
14043 * packets plus relevant metadata that has been extracted from the
14044 * container.
14045 */
14046
14047 _ElementaryStream = function ElementaryStream() {
14048 var self = this,
14049 segmentHadPmt = false,
14050 // PES packet fragments
14051 video = {
14052 data: [],
14053 size: 0
14054 },
14055 audio = {
14056 data: [],
14057 size: 0
14058 },
14059 timedMetadata = {
14060 data: [],
14061 size: 0
14062 },
14063 programMapTable,
14064 parsePes = function parsePes(payload, pes) {
14065 var ptsDtsFlags;
14066 var startPrefix = payload[0] << 16 | payload[1] << 8 | payload[2]; // default to an empty array
14067
14068 pes.data = new Uint8Array(); // In certain live streams, the start of a TS fragment has ts packets
14069 // that are frame data that is continuing from the previous fragment. This
14070 // is to check that the pes data is the start of a new pes payload
14071
14072 if (startPrefix !== 1) {
14073 return;
14074 } // get the packet length, this will be 0 for video
14075
14076
14077 pes.packetLength = 6 + (payload[4] << 8 | payload[5]); // find out if this packet starts a new keyframe
14078
14079 pes.dataAlignmentIndicator = (payload[6] & 0x04) !== 0; // PES packets may be annotated with a PTS value, or a PTS value
14080 // and a DTS value. Determine what combination of values is
14081 // available to work with.
14082
14083 ptsDtsFlags = payload[7]; // PTS and DTS are normally stored as a 33-bit number. JavaScript
14084 // performs all bitwise operations on 32-bit integers, but supports a much
14085 // greater range of exact integers (53 bits) using standard
14086 // mathematical operations.
14087 // We construct a 31-bit value using bitwise operators over the 31
14088 // most significant bits and then multiply by 4 (equal to a left-shift
14089 // of 2) before we add the final 2 least significant bits of the
14090 // timestamp (equal to an OR.)
14091
14092 if (ptsDtsFlags & 0xC0) {
14093 // the PTS and DTS are not written out directly. For information
14094 // on how they are encoded, see
14095 // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
14096 pes.pts = (payload[9] & 0x0E) << 27 | (payload[10] & 0xFF) << 20 | (payload[11] & 0xFE) << 12 | (payload[12] & 0xFF) << 5 | (payload[13] & 0xFE) >>> 3;
14097 pes.pts *= 4; // Left shift by 2
14098
14099 pes.pts += (payload[13] & 0x06) >>> 1; // OR by the two LSBs
14100
14101 pes.dts = pes.pts;
14102
14103 if (ptsDtsFlags & 0x40) {
14104 pes.dts = (payload[14] & 0x0E) << 27 | (payload[15] & 0xFF) << 20 | (payload[16] & 0xFE) << 12 | (payload[17] & 0xFF) << 5 | (payload[18] & 0xFE) >>> 3;
14105 pes.dts *= 4; // Left shift by 2
14106
14107 pes.dts += (payload[18] & 0x06) >>> 1; // OR by the two LSBs
14108 }
14109 } // the data section starts immediately after the PES header.
14110 // pes_header_data_length specifies the number of header bytes
14111 // that follow the last byte of the field.
14112
14113
14114 pes.data = payload.subarray(9 + payload[8]);
14115 },
14116
14117 /**
14118 * Pass completely parsed PES packets to the next stream in the pipeline
14119 **/
14120 flushStream = function flushStream(stream, type, forceFlush) {
14121 var packetData = new Uint8Array(stream.size),
14122 event = {
14123 type: type
14124 },
14125 i = 0,
14126 offset = 0,
14127 packetFlushable = false,
14128 fragment; // do nothing if there is not enough buffered data for a complete
14129 // PES header
14130
14131 if (!stream.data.length || stream.size < 9) {
14132 return;
14133 }
14134
14135 event.trackId = stream.data[0].pid; // reassemble the packet
14136
14137 for (i = 0; i < stream.data.length; i++) {
14138 fragment = stream.data[i];
14139 packetData.set(fragment.data, offset);
14140 offset += fragment.data.byteLength;
14141 } // parse assembled packet's PES header
14142
14143
14144 parsePes(packetData, event); // non-video PES packets MUST have a non-zero PES_packet_length
14145 // check that there is enough stream data to fill the packet
14146
14147 packetFlushable = type === 'video' || event.packetLength <= stream.size; // flush pending packets if the conditions are right
14148
14149 if (forceFlush || packetFlushable) {
14150 stream.size = 0;
14151 stream.data.length = 0;
14152 } // only emit packets that are complete. this is to avoid assembling
14153 // incomplete PES packets due to poor segmentation
14154
14155
14156 if (packetFlushable) {
14157 self.trigger('data', event);
14158 }
14159 };
14160
14161 _ElementaryStream.prototype.init.call(this);
14162 /**
14163 * Identifies M2TS packet types and parses PES packets using metadata
14164 * parsed from the PMT
14165 **/
14166
14167
14168 this.push = function (data) {
14169 ({
14170 pat: function pat() {// we have to wait for the PMT to arrive as well before we
14171 // have any meaningful metadata
14172 },
14173 pes: function pes() {
14174 var stream, streamType;
14175
14176 switch (data.streamType) {
14177 case streamTypes.H264_STREAM_TYPE:
14178 stream = video;
14179 streamType = 'video';
14180 break;
14181
14182 case streamTypes.ADTS_STREAM_TYPE:
14183 stream = audio;
14184 streamType = 'audio';
14185 break;
14186
14187 case streamTypes.METADATA_STREAM_TYPE:
14188 stream = timedMetadata;
14189 streamType = 'timed-metadata';
14190 break;
14191
14192 default:
14193 // ignore unknown stream types
14194 return;
14195 } // if a new packet is starting, we can flush the completed
14196 // packet
14197
14198
14199 if (data.payloadUnitStartIndicator) {
14200 flushStream(stream, streamType, true);
14201 } // buffer this fragment until we are sure we've received the
14202 // complete payload
14203
14204
14205 stream.data.push(data);
14206 stream.size += data.data.byteLength;
14207 },
14208 pmt: function pmt() {
14209 var event = {
14210 type: 'metadata',
14211 tracks: []
14212 };
14213 programMapTable = data.programMapTable; // translate audio and video streams to tracks
14214
14215 if (programMapTable.video !== null) {
14216 event.tracks.push({
14217 timelineStartInfo: {
14218 baseMediaDecodeTime: 0
14219 },
14220 id: +programMapTable.video,
14221 codec: 'avc',
14222 type: 'video'
14223 });
14224 }
14225
14226 if (programMapTable.audio !== null) {
14227 event.tracks.push({
14228 timelineStartInfo: {
14229 baseMediaDecodeTime: 0
14230 },
14231 id: +programMapTable.audio,
14232 codec: 'adts',
14233 type: 'audio'
14234 });
14235 }
14236
14237 segmentHadPmt = true;
14238 self.trigger('data', event);
14239 }
14240 })[data.type]();
14241 };
14242
14243 this.reset = function () {
14244 video.size = 0;
14245 video.data.length = 0;
14246 audio.size = 0;
14247 audio.data.length = 0;
14248 this.trigger('reset');
14249 };
14250 /**
14251 * Flush any remaining input. Video PES packets may be of variable
14252 * length. Normally, the start of a new video packet can trigger the
14253 * finalization of the previous packet. That is not possible if no
14254 * more video is forthcoming, however. In that case, some other
14255 * mechanism (like the end of the file) has to be employed. When it is
14256 * clear that no additional data is forthcoming, calling this method
14257 * will flush the buffered packets.
14258 */
14259
14260
14261 this.flushStreams_ = function () {
14262 // !!THIS ORDER IS IMPORTANT!!
14263 // video first then audio
14264 flushStream(video, 'video');
14265 flushStream(audio, 'audio');
14266 flushStream(timedMetadata, 'timed-metadata');
14267 };
14268
14269 this.flush = function () {
14270 // if no PMT was emitted for this segment but we have one
14271 // to emit, emit it now so that a trackinfo event is
14272 // triggered downstream.
14273 if (!segmentHadPmt && programMapTable) {
14274 var pmt = {
14275 type: 'metadata',
14276 tracks: []
14277 }; // translate audio and video streams to tracks
14278
14279 if (programMapTable.video !== null) {
14280 pmt.tracks.push({
14281 timelineStartInfo: {
14282 baseMediaDecodeTime: 0
14283 },
14284 id: +programMapTable.video,
14285 codec: 'avc',
14286 type: 'video'
14287 });
14288 }
14289
14290 if (programMapTable.audio !== null) {
14291 pmt.tracks.push({
14292 timelineStartInfo: {
14293 baseMediaDecodeTime: 0
14294 },
14295 id: +programMapTable.audio,
14296 codec: 'adts',
14297 type: 'audio'
14298 });
14299 }
14300
14301 self.trigger('data', pmt);
14302 }
14303
14304 segmentHadPmt = false;
14305 this.flushStreams_();
14306 this.trigger('done');
14307 };
14308 };
14309
14310 _ElementaryStream.prototype = new stream();
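  // Editor's sketch (assumed name, not part of mux.js): the 33-bit PTS math
  // from parsePes above in isolation -- the top 31 bits are assembled with
  // bitwise operators, then plain arithmetic appends the final two bits.
  var exampleDecodePts = function (p) {
    var pts = (p[9] & 0x0e) << 27 | (p[10] & 0xff) << 20 | (p[11] & 0xfe) << 12 | (p[12] & 0xff) << 5 | (p[13] & 0xfe) >>> 3;
    return pts * 4 + ((p[13] & 0x06) >>> 1); // shift left by 2, then OR in the 2 LSBs
  };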
14311 var m2ts = {
14312 PAT_PID: 0x0000,
14313 MP2T_PACKET_LENGTH: MP2T_PACKET_LENGTH$1,
14314 TransportPacketStream: _TransportPacketStream,
14315 TransportParseStream: _TransportParseStream,
14316 ElementaryStream: _ElementaryStream,
14317 TimestampRolloverStream: TimestampRolloverStream,
14318 CaptionStream: captionStream.CaptionStream,
14319 Cea608Stream: captionStream.Cea608Stream,
14320 Cea708Stream: captionStream.Cea708Stream,
14321 MetadataStream: metadataStream
14322 };
14323
14324 for (var type in streamTypes) {
14325 if (streamTypes.hasOwnProperty(type)) {
14326 m2ts[type] = streamTypes[type];
14327 }
14328 }
14329
14330 var m2ts_1 = m2ts;
14331 var ONE_SECOND_IN_TS$2 = clock.ONE_SECOND_IN_TS;
14332
14333 var _AdtsStream;
14334
14335 var ADTS_SAMPLING_FREQUENCIES$1 = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
14336 /*
14337 * Accepts an ElementaryStream and emits data events with parsed
14338 * AAC Audio Frames of the individual packets. Input audio in ADTS
14339 * format is unpacked and re-emitted as AAC frames.
14340 *
14341 * @see http://wiki.multimedia.cx/index.php?title=ADTS
14342 * @see http://wiki.multimedia.cx/?title=Understanding_AAC
14343 */
14344
14345 _AdtsStream = function AdtsStream(handlePartialSegments) {
14346 var buffer,
14347 frameNum = 0;
14348
14349 _AdtsStream.prototype.init.call(this);
14350
14351 this.skipWarn_ = function (start, end) {
14352 this.trigger('log', {
14353 level: 'warn',
14354 message: "adts skiping bytes " + start + " to " + end + " in frame " + frameNum + " outside syncword"
14355 });
14356 };
14357
14358 this.push = function (packet) {
14359 var i = 0,
14360 frameLength,
14361 protectionSkipBytes,
14362 oldBuffer,
14363 sampleCount,
14364 adtsFrameDuration;
14365
14366 if (!handlePartialSegments) {
14367 frameNum = 0;
14368 }
14369
14370 if (packet.type !== 'audio') {
14371 // ignore non-audio data
14372 return;
14373 } // Prepend any data in the buffer to the input data so that we can parse
14374 // aac frames that cross a PES packet boundary
14375
14376
14377 if (buffer && buffer.length) {
14378 oldBuffer = buffer;
14379 buffer = new Uint8Array(oldBuffer.byteLength + packet.data.byteLength);
14380 buffer.set(oldBuffer);
14381 buffer.set(packet.data, oldBuffer.byteLength);
14382 } else {
14383 buffer = packet.data;
14384 } // unpack any ADTS frames which have been fully received
14385 // for details on the ADTS header, see http://wiki.multimedia.cx/index.php?title=ADTS
14386
14387
14388 var skip; // We use i + 7 here because we want to be able to parse the entire header.
14389 // If we don't have enough bytes to do that, then we definitely won't have a full frame.
14390
14391 while (i + 7 < buffer.length) {
14392 // Look for the start of an ADTS header.
14393 if (buffer[i] !== 0xFF || (buffer[i + 1] & 0xF6) !== 0xF0) {
14394 if (typeof skip !== 'number') {
14395 skip = i;
14396 } // If a valid header was not found, jump one forward and attempt to
14397 // find a valid ADTS header starting at the next byte
14398
14399
14400 i++;
14401 continue;
14402 }
14403
14404 if (typeof skip === 'number') {
14405 this.skipWarn_(skip, i);
14406 skip = null;
14407 } // The protection skip bit tells us if we have 2 bytes of CRC data at the
14408 // end of the ADTS header
14409
14410
14411 protectionSkipBytes = (~buffer[i + 1] & 0x01) * 2; // Frame length is a 13 bit integer starting 16 bits from the
14412 // end of the sync sequence
14413 // NOTE: frame length includes the size of the header
14414
14415 frameLength = (buffer[i + 3] & 0x03) << 11 | buffer[i + 4] << 3 | (buffer[i + 5] & 0xe0) >> 5;
14416 sampleCount = ((buffer[i + 6] & 0x03) + 1) * 1024;
14417 adtsFrameDuration = sampleCount * ONE_SECOND_IN_TS$2 / ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2]; // If we don't have enough data to actually finish this ADTS frame,
14418 // then we have to wait for more data
14419
14420 if (buffer.byteLength - i < frameLength) {
14421 break;
14422 } // Otherwise, deliver the complete AAC frame
14423
14424
14425 this.trigger('data', {
14426 pts: packet.pts + frameNum * adtsFrameDuration,
14427 dts: packet.dts + frameNum * adtsFrameDuration,
14428 sampleCount: sampleCount,
14429 audioobjecttype: (buffer[i + 2] >>> 6 & 0x03) + 1,
14430 channelcount: (buffer[i + 2] & 1) << 2 | (buffer[i + 3] & 0xc0) >>> 6,
14431 samplerate: ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2],
14432 samplingfrequencyindex: (buffer[i + 2] & 0x3c) >>> 2,
14433 // assume ISO/IEC 14496-12 AudioSampleEntry default of 16
14434 samplesize: 16,
14435 // data is the frame without its header
14436 data: buffer.subarray(i + 7 + protectionSkipBytes, i + frameLength)
14437 });
14438 frameNum++;
14439 i += frameLength;
14440 }
14441
14442 if (typeof skip === 'number') {
14443 this.skipWarn_(skip, i);
14444 skip = null;
14445 } // remove processed bytes from the buffer.
14446
14447
14448 buffer = buffer.subarray(i);
14449 };
14450
14451 this.flush = function () {
14452 frameNum = 0;
14453 this.trigger('done');
14454 };
14455
14456 this.reset = function () {
14457 buffer = void 0;
14458 this.trigger('reset');
14459 };
14460
14461 this.endTimeline = function () {
14462 buffer = void 0;
14463 this.trigger('endedtimeline');
14464 };
14465 };
14466
14467 _AdtsStream.prototype = new stream();
14468 var adts = _AdtsStream;
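  // Editor's sketch (assumed name, not part of mux.js): the 13-bit ADTS frame
  // length read in push() straddles three header bytes -- 2 bits from byte 3,
  // 8 from byte 4, 3 from byte 5 -- and includes the header itself.
  var exampleAdtsFrameLength = function (b, i) {
    return (b[i + 3] & 0x03) << 11 | b[i + 4] << 3 | (b[i + 5] & 0xe0) >> 5;
  };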
14469 /**
14470 * mux.js
14471 *
14472 * Copyright (c) Brightcove
14473 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
14474 */
14475
14476 var ExpGolomb;
14477 /**
14478 * Parser for exponential Golomb codes, a variable-bitwidth number encoding
14479 * scheme used by h264.
14480 */
14481
14482 ExpGolomb = function ExpGolomb(workingData) {
14483 var // the number of bytes left to examine in workingData
14484 workingBytesAvailable = workingData.byteLength,
14485 // the current word being examined
14486 workingWord = 0,
14487 // :uint
14488 // the number of bits left to examine in the current word
14489 workingBitsAvailable = 0; // :uint;
14490 // ():uint
14491
14492 this.length = function () {
14493 return 8 * workingBytesAvailable;
14494 }; // ():uint
14495
14496
14497 this.bitsAvailable = function () {
14498 return 8 * workingBytesAvailable + workingBitsAvailable;
14499 }; // ():void
14500
14501
14502 this.loadWord = function () {
14503 var position = workingData.byteLength - workingBytesAvailable,
14504 workingBytes = new Uint8Array(4),
14505 availableBytes = Math.min(4, workingBytesAvailable);
14506
14507 if (availableBytes === 0) {
14508 throw new Error('no bytes available');
14509 }
14510
14511 workingBytes.set(workingData.subarray(position, position + availableBytes));
14512 workingWord = new DataView(workingBytes.buffer).getUint32(0); // track the amount of workingData that has been processed
14513
14514 workingBitsAvailable = availableBytes * 8;
14515 workingBytesAvailable -= availableBytes;
14516 }; // (count:int):void
14517
14518
14519 this.skipBits = function (count) {
14520 var skipBytes; // :int
14521
14522 if (workingBitsAvailable > count) {
14523 workingWord <<= count;
14524 workingBitsAvailable -= count;
14525 } else {
14526 count -= workingBitsAvailable;
14527 skipBytes = Math.floor(count / 8);
14528 count -= skipBytes * 8;
14529 workingBytesAvailable -= skipBytes;
14530 this.loadWord();
14531 workingWord <<= count;
14532 workingBitsAvailable -= count;
14533 }
14534 }; // (size:int):uint
14535
14536
14537 this.readBits = function (size) {
14538 var bits = Math.min(workingBitsAvailable, size),
14539 // :uint
14540 valu = workingWord >>> 32 - bits; // :uint
14541 // if size > 31, handle error
14542
14543 workingBitsAvailable -= bits;
14544
14545 if (workingBitsAvailable > 0) {
14546 workingWord <<= bits;
14547 } else if (workingBytesAvailable > 0) {
14548 this.loadWord();
14549 }
14550
14551 bits = size - bits;
14552
14553 if (bits > 0) {
14554 return valu << bits | this.readBits(bits);
14555 }
14556
14557 return valu;
14558 }; // ():uint
14559
14560
14561 this.skipLeadingZeros = function () {
14562 var leadingZeroCount; // :uint
14563
14564 for (leadingZeroCount = 0; leadingZeroCount < workingBitsAvailable; ++leadingZeroCount) {
14565 if ((workingWord & 0x80000000 >>> leadingZeroCount) !== 0) {
14566 // the first bit of working word is 1
14567 workingWord <<= leadingZeroCount;
14568 workingBitsAvailable -= leadingZeroCount;
14569 return leadingZeroCount;
14570 }
14571 } // we exhausted workingWord and still have not found a 1
14572
14573
14574 this.loadWord();
14575 return leadingZeroCount + this.skipLeadingZeros();
14576 }; // ():void
14577
14578
14579 this.skipUnsignedExpGolomb = function () {
14580 this.skipBits(1 + this.skipLeadingZeros());
14581 }; // ():void
14582
14583
14584 this.skipExpGolomb = function () {
14585 this.skipBits(1 + this.skipLeadingZeros());
14586 }; // ():uint
14587
14588
14589 this.readUnsignedExpGolomb = function () {
14590 var clz = this.skipLeadingZeros(); // :uint
14591
14592 return this.readBits(clz + 1) - 1;
14593 }; // ():int
14594
14595
14596 this.readExpGolomb = function () {
14597 var valu = this.readUnsignedExpGolomb(); // :int
14598
14599 if (0x01 & valu) {
14600 // the number is odd if the low order bit is set
14601 return (1 + valu) >>> 1; // add 1 to make it even, and divide by 2
14602 }
14603
14604 return -1 * (valu >>> 1); // divide by two then make it negative
14605 }; // Some convenience functions
14606 // :Boolean
14607
14608
14609 this.readBoolean = function () {
14610 return this.readBits(1) === 1;
14611 }; // ():int
14612
14613
14614 this.readUnsignedByte = function () {
14615 return this.readBits(8);
14616 };
14617
14618 this.loadWord();
14619 };
14620
14621 var expGolomb = ExpGolomb;
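  // Editor's sketch: a worked Exp-Golomb decode using the parser above. The
  // byte 0x20 is the bit pattern 00100000: two leading zeros, so the decoder
  // reads three bits (0b100 === 4) and subtracts one. Signed codes then map
  // 1, 2, 3, 4, ... onto 1, -1, 2, -2, ...
  var exampleUeValue = new expGolomb(new Uint8Array([0x20, 0x00, 0x00, 0x00])).readUnsignedExpGolomb(); // === 3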
14622
14623 var _H264Stream, _NalByteStream;
14624
14625 var PROFILES_WITH_OPTIONAL_SPS_DATA;
14626 /**
14627 * Accepts a NAL unit byte stream and unpacks the embedded NAL units.
14628 */
14629
14630 _NalByteStream = function NalByteStream() {
14631 var syncPoint = 0,
14632 i,
14633 buffer;
14634
14635 _NalByteStream.prototype.init.call(this);
14636 /*
14637 * Scans a byte stream and triggers a data event with the NAL units found.
14638 * @param {Object} data Event received from H264Stream
14639 * @param {Uint8Array} data.data The h264 byte stream to be scanned
14640 *
14641 * @see H264Stream.push
14642 */
14643
14644
14645 this.push = function (data) {
14646 var swapBuffer;
14647
14648 if (!buffer) {
14649 buffer = data.data;
14650 } else {
14651 swapBuffer = new Uint8Array(buffer.byteLength + data.data.byteLength);
14652 swapBuffer.set(buffer);
14653 swapBuffer.set(data.data, buffer.byteLength);
14654 buffer = swapBuffer;
14655 }
14656
14657 var len = buffer.byteLength; // Rec. ITU-T H.264, Annex B
14658 // scan for NAL unit boundaries
14659 // a match looks like this:
14660 // 0 0 1 .. NAL .. 0 0 1
14661 // ^ sync point ^ i
14662 // or this:
14663 // 0 0 1 .. NAL .. 0 0 0
14664 // ^ sync point ^ i
14665 // advance the sync point to a NAL start, if necessary
14666
14667 for (; syncPoint < len - 3; syncPoint++) {
14668 if (buffer[syncPoint + 2] === 1) {
14669 // the sync point is properly aligned
14670 i = syncPoint + 5;
14671 break;
14672 }
14673 }
14674
14675 while (i < len) {
14676 // look at the current byte to determine if we've hit the end of
14677 // a NAL unit boundary
14678 switch (buffer[i]) {
14679 case 0:
14680 // skip past non-sync sequences
14681 if (buffer[i - 1] !== 0) {
14682 i += 2;
14683 break;
14684 } else if (buffer[i - 2] !== 0) {
14685 i++;
14686 break;
14687 } // deliver the NAL unit if it isn't empty
14688
14689
14690 if (syncPoint + 3 !== i - 2) {
14691 this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
14692 } // drop trailing zeroes
14693
14694
14695 do {
14696 i++;
14697 } while (buffer[i] !== 1 && i < len);
14698
14699 syncPoint = i - 2;
14700 i += 3;
14701 break;
14702
14703 case 1:
14704 // skip past non-sync sequences
14705 if (buffer[i - 1] !== 0 || buffer[i - 2] !== 0) {
14706 i += 3;
14707 break;
14708 } // deliver the NAL unit
14709
14710
14711 this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
14712 syncPoint = i - 2;
14713 i += 3;
14714 break;
14715
14716 default:
14717 // the current byte isn't a one or zero, so it cannot be part
14718 // of a sync sequence
14719 i += 3;
14720 break;
14721 }
14722 } // filter out the NAL units that were delivered
14723
14724
14725 buffer = buffer.subarray(syncPoint);
14726 i -= syncPoint;
14727 syncPoint = 0;
14728 };
14729
14730 this.reset = function () {
14731 buffer = null;
14732 syncPoint = 0;
14733 this.trigger('reset');
14734 };
14735
14736 this.flush = function () {
14737 // deliver the last buffered NAL unit
14738 if (buffer && buffer.byteLength > 3) {
14739 this.trigger('data', buffer.subarray(syncPoint + 3));
14740 } // reset the stream state
14741
14742
14743 buffer = null;
14744 syncPoint = 0;
14745 this.trigger('done');
14746 };
14747
14748 this.endTimeline = function () {
14749 this.flush();
14750 this.trigger('endedtimeline');
14751 };
14752 };
14753
14754 _NalByteStream.prototype = new stream(); // values of profile_idc that indicate additional fields are included in the SPS
14755 // see Recommendation ITU-T H.264 (4/2013),
14756 // 7.3.2.1.1 Sequence parameter set data syntax
14757
14758 PROFILES_WITH_OPTIONAL_SPS_DATA = {
14759 100: true,
14760 110: true,
14761 122: true,
14762 244: true,
14763 44: true,
14764 83: true,
14765 86: true,
14766 118: true,
14767 128: true,
14768 // TODO: the three profiles below don't
14769 // appear to have sps data in the specification anymore?
14770 138: true,
14771 139: true,
14772 134: true
14773 };
14774 /**
14775 * Accepts input from an ElementaryStream and produces H.264 NAL unit data
14776 * events.
14777 */
14778
14779 _H264Stream = function H264Stream() {
14780 var nalByteStream = new _NalByteStream(),
14781 self,
14782 trackId,
14783 currentPts,
14784 currentDts,
14785 discardEmulationPreventionBytes,
14786 readSequenceParameterSet,
14787 skipScalingList;
14788
14789 _H264Stream.prototype.init.call(this);
14790
14791 self = this;
14792 /*
14793 * Pushes a packet from a stream onto the NalByteStream
14794 *
14795 * @param {Object} packet - A packet received from a stream
14796 * @param {Uint8Array} packet.data - The raw bytes of the packet
14797 * @param {Number} packet.dts - Decode timestamp of the packet
14798 * @param {Number} packet.pts - Presentation timestamp of the packet
14799 * @param {Number} packet.trackId - The id of the h264 track this packet came from
14800 * @param {('video'|'audio')} packet.type - The type of packet
14801 *
14802 */
14803
14804 this.push = function (packet) {
14805 if (packet.type !== 'video') {
14806 return;
14807 }
14808
14809 trackId = packet.trackId;
14810 currentPts = packet.pts;
14811 currentDts = packet.dts;
14812 nalByteStream.push(packet);
14813 };
14814 /*
14815 * Identify NAL unit types and pass on the NALU, trackId, presentation and decode timestamps
14816 * for the NALUs to the next stream component.
14817 * Also, preprocess caption and sequence parameter NALUs.
14818 *
14819 * @param {Uint8Array} data - A NAL unit identified by `NalByteStream.push`
14820 * @see NalByteStream.push
14821 */
14822
14823
14824 nalByteStream.on('data', function (data) {
14825 var event = {
14826 trackId: trackId,
14827 pts: currentPts,
14828 dts: currentDts,
14829 data: data,
14830 nalUnitTypeCode: data[0] & 0x1f
14831 };
14832
14833 switch (event.nalUnitTypeCode) {
14834 case 0x05:
14835 event.nalUnitType = 'slice_layer_without_partitioning_rbsp_idr';
14836 break;
14837
14838 case 0x06:
14839 event.nalUnitType = 'sei_rbsp';
14840 event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
14841 break;
14842
14843 case 0x07:
14844 event.nalUnitType = 'seq_parameter_set_rbsp';
14845 event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
14846 event.config = readSequenceParameterSet(event.escapedRBSP);
14847 break;
14848
14849 case 0x08:
14850 event.nalUnitType = 'pic_parameter_set_rbsp';
14851 break;
14852
14853 case 0x09:
14854 event.nalUnitType = 'access_unit_delimiter_rbsp';
14855 break;
14856 } // This triggers data on the H264Stream
14857
14858
14859 self.trigger('data', event);
14860 });
14861 nalByteStream.on('done', function () {
14862 self.trigger('done');
14863 });
14864 nalByteStream.on('partialdone', function () {
14865 self.trigger('partialdone');
14866 });
14867 nalByteStream.on('reset', function () {
14868 self.trigger('reset');
14869 });
14870 nalByteStream.on('endedtimeline', function () {
14871 self.trigger('endedtimeline');
14872 });
14873
14874 this.flush = function () {
14875 nalByteStream.flush();
14876 };
14877
14878 this.partialFlush = function () {
14879 nalByteStream.partialFlush();
14880 };
14881
14882 this.reset = function () {
14883 nalByteStream.reset();
14884 };
14885
14886 this.endTimeline = function () {
14887 nalByteStream.endTimeline();
14888 };
14889 /**
14890 * Advance the ExpGolomb decoder past a scaling list. The scaling
14891 * list is optionally transmitted as part of a sequence parameter
14892 * set and is not relevant to transmuxing.
14893 * @param count {number} the number of entries in this scaling list
14894 * @param expGolombDecoder {object} an ExpGolomb pointed to the
14895 * start of a scaling list
14896 * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
14897 */
14898
14899
14900 skipScalingList = function skipScalingList(count, expGolombDecoder) {
14901 var lastScale = 8,
14902 nextScale = 8,
14903 j,
14904 deltaScale;
14905
14906 for (j = 0; j < count; j++) {
14907 if (nextScale !== 0) {
14908 deltaScale = expGolombDecoder.readExpGolomb();
14909 nextScale = (lastScale + deltaScale + 256) % 256;
14910 }
14911
14912 lastScale = nextScale === 0 ? lastScale : nextScale;
14913 }
14914 };
14915 /**
14916 * Expunge any "Emulation Prevention" bytes from a "Raw Byte
14917 * Sequence Payload"
14918 * @param data {Uint8Array} the bytes of a RBSP from a NAL
14919 * unit
14920 * @return {Uint8Array} the RBSP without any Emulation
14921 * Prevention Bytes
14922 */
14923
14924
14925 discardEmulationPreventionBytes = function discardEmulationPreventionBytes(data) {
14926 var length = data.byteLength,
14927 emulationPreventionBytesPositions = [],
14928 i = 1,
14929 newLength,
14930 newData; // Find all `Emulation Prevention Bytes`
14931
14932 while (i < length - 2) {
14933 if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
14934 emulationPreventionBytesPositions.push(i + 2);
14935 i += 2;
14936 } else {
14937 i++;
14938 }
14939 } // If no Emulation Prevention Bytes were found just return the original
14940 // array
14941
14942
14943 if (emulationPreventionBytesPositions.length === 0) {
14944 return data;
14945 } // Create a new array to hold the NAL unit data
14946
14947
14948 newLength = length - emulationPreventionBytesPositions.length;
14949 newData = new Uint8Array(newLength);
14950 var sourceIndex = 0;
14951
14952 for (i = 0; i < newLength; sourceIndex++, i++) {
14953 if (sourceIndex === emulationPreventionBytesPositions[0]) {
14954 // Skip this byte
14955 sourceIndex++; // Remove this position index
14956
14957 emulationPreventionBytesPositions.shift();
14958 }
14959
14960 newData[i] = data[sourceIndex];
14961 }
14962
14963 return newData;
14964 };
14965 /**
14966 * Read a sequence parameter set and return some interesting video
14967 * properties. A sequence parameter set is the H264 metadata that
14968 * describes the properties of upcoming video frames.
14969 * @param data {Uint8Array} the bytes of a sequence parameter set
14970 * @return {object} an object with configuration parsed from the
14971 * sequence parameter set, including the dimensions of the
14972 * associated video frames.
14973 */
14974
14975
14976 readSequenceParameterSet = function readSequenceParameterSet(data) {
14977 var frameCropLeftOffset = 0,
14978 frameCropRightOffset = 0,
14979 frameCropTopOffset = 0,
14980 frameCropBottomOffset = 0,
14981 expGolombDecoder,
14982 profileIdc,
14983 levelIdc,
14984 profileCompatibility,
14985 chromaFormatIdc,
14986 picOrderCntType,
14987 numRefFramesInPicOrderCntCycle,
14988 picWidthInMbsMinus1,
14989 picHeightInMapUnitsMinus1,
14990 frameMbsOnlyFlag,
14991 scalingListCount,
14992 sarRatio = [1, 1],
14993 aspectRatioIdc,
14994 i;
14995 expGolombDecoder = new expGolomb(data);
14996 profileIdc = expGolombDecoder.readUnsignedByte(); // profile_idc
14997
14998 profileCompatibility = expGolombDecoder.readUnsignedByte(); // constraint_set[0-5]_flag
14999
15000 levelIdc = expGolombDecoder.readUnsignedByte(); // level_idc u(8)
15001
15002 expGolombDecoder.skipUnsignedExpGolomb(); // seq_parameter_set_id
15003 // some profiles have more optional data we don't need
15004
15005 if (PROFILES_WITH_OPTIONAL_SPS_DATA[profileIdc]) {
15006 chromaFormatIdc = expGolombDecoder.readUnsignedExpGolomb();
15007
15008 if (chromaFormatIdc === 3) {
15009 expGolombDecoder.skipBits(1); // separate_colour_plane_flag
15010 }
15011
15012 expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_luma_minus8
15013
15014 expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_chroma_minus8
15015
15016 expGolombDecoder.skipBits(1); // qpprime_y_zero_transform_bypass_flag
15017
15018 if (expGolombDecoder.readBoolean()) {
15019 // seq_scaling_matrix_present_flag
15020 scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
15021
15022 for (i = 0; i < scalingListCount; i++) {
15023 if (expGolombDecoder.readBoolean()) {
15024 // seq_scaling_list_present_flag[ i ]
15025 if (i < 6) {
15026 skipScalingList(16, expGolombDecoder);
15027 } else {
15028 skipScalingList(64, expGolombDecoder);
15029 }
15030 }
15031 }
15032 }
15033 }
15034
15035 expGolombDecoder.skipUnsignedExpGolomb(); // log2_max_frame_num_minus4
15036
15037 picOrderCntType = expGolombDecoder.readUnsignedExpGolomb();
15038
15039 if (picOrderCntType === 0) {
15040 expGolombDecoder.readUnsignedExpGolomb(); // log2_max_pic_order_cnt_lsb_minus4
15041 } else if (picOrderCntType === 1) {
15042 expGolombDecoder.skipBits(1); // delta_pic_order_always_zero_flag
15043
15044 expGolombDecoder.skipExpGolomb(); // offset_for_non_ref_pic
15045
15046 expGolombDecoder.skipExpGolomb(); // offset_for_top_to_bottom_field
15047
15048 numRefFramesInPicOrderCntCycle = expGolombDecoder.readUnsignedExpGolomb();
15049
15050 for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
15051 expGolombDecoder.skipExpGolomb(); // offset_for_ref_frame[ i ]
15052 }
15053 }
15054
15055 expGolombDecoder.skipUnsignedExpGolomb(); // max_num_ref_frames
15056
15057 expGolombDecoder.skipBits(1); // gaps_in_frame_num_value_allowed_flag
15058
15059 picWidthInMbsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
15060 picHeightInMapUnitsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
15061 frameMbsOnlyFlag = expGolombDecoder.readBits(1);
15062
15063 if (frameMbsOnlyFlag === 0) {
15064 expGolombDecoder.skipBits(1); // mb_adaptive_frame_field_flag
15065 }
15066
15067 expGolombDecoder.skipBits(1); // direct_8x8_inference_flag
15068
15069 if (expGolombDecoder.readBoolean()) {
15070 // frame_cropping_flag
15071 frameCropLeftOffset = expGolombDecoder.readUnsignedExpGolomb();
15072 frameCropRightOffset = expGolombDecoder.readUnsignedExpGolomb();
15073 frameCropTopOffset = expGolombDecoder.readUnsignedExpGolomb();
15074 frameCropBottomOffset = expGolombDecoder.readUnsignedExpGolomb();
15075 }
15076
15077 if (expGolombDecoder.readBoolean()) {
15078 // vui_parameters_present_flag
15079 if (expGolombDecoder.readBoolean()) {
15080 // aspect_ratio_info_present_flag
15081 aspectRatioIdc = expGolombDecoder.readUnsignedByte();
15082
15083 switch (aspectRatioIdc) {
15084 case 1:
15085 sarRatio = [1, 1];
15086 break;
15087
15088 case 2:
15089 sarRatio = [12, 11];
15090 break;
15091
15092 case 3:
15093 sarRatio = [10, 11];
15094 break;
15095
15096 case 4:
15097 sarRatio = [16, 11];
15098 break;
15099
15100 case 5:
15101 sarRatio = [40, 33];
15102 break;
15103
15104 case 6:
15105 sarRatio = [24, 11];
15106 break;
15107
15108 case 7:
15109 sarRatio = [20, 11];
15110 break;
15111
15112 case 8:
15113 sarRatio = [32, 11];
15114 break;
15115
15116 case 9:
15117 sarRatio = [80, 33];
15118 break;
15119
15120 case 10:
15121 sarRatio = [18, 11];
15122 break;
15123
15124 case 11:
15125 sarRatio = [15, 11];
15126 break;
15127
15128 case 12:
15129 sarRatio = [64, 33];
15130 break;
15131
15132 case 13:
15133 sarRatio = [160, 99];
15134 break;
15135
15136 case 14:
15137 sarRatio = [4, 3];
15138 break;
15139
15140 case 15:
15141 sarRatio = [3, 2];
15142 break;
15143
15144 case 16:
15145 sarRatio = [2, 1];
15146 break;
15147
15148 case 255:
15149 {
15150 sarRatio = [expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte(), expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte()];
15151 break;
15152 }
15153 }
15154
15155 // sarRatio is returned as-is below; no derived scale needs to be computed here
15158 }
15159 }
15160
15161 return {
15162 profileIdc: profileIdc,
15163 levelIdc: levelIdc,
15164 profileCompatibility: profileCompatibility,
15165 width: (picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2,
15166 height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - frameCropTopOffset * 2 - frameCropBottomOffset * 2,
15167 // sar is sample aspect ratio
15168 sarRatio: sarRatio
15169 };
15170 };
15171 };
15172
15173 _H264Stream.prototype = new stream();
15174 var h264 = {
15175 H264Stream: _H264Stream,
15176 NalByteStream: _NalByteStream
15177 };
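  // Editor's sketch (assumed name, not part of mux.js): the dimension math at
  // the end of readSequenceParameterSet, ignoring the left/right/top crops for
  // brevity. A typical 1080p SPS carries pic_width_in_mbs_minus1 = 119,
  // pic_height_in_map_units_minus1 = 67, frame_mbs_only_flag = 1 and a bottom
  // crop of 4 (luma rows are trimmed in units of 2 for 4:2:0).
  var exampleSpsDimensions = function (widthMbsMinus1, heightMapUnitsMinus1, frameMbsOnlyFlag, cropBottom) {
    return {
      width: (widthMbsMinus1 + 1) * 16,
      height: (2 - frameMbsOnlyFlag) * (heightMapUnitsMinus1 + 1) * 16 - cropBottom * 2
    };
  };
  // exampleSpsDimensions(119, 67, 1, 4) => { width: 1920, height: 1080 }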
15178 /**
15179 * mux.js
15180 *
15181 * Copyright (c) Brightcove
15182 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
15183 *
15184 * Utilities to detect basic properties and metadata about Aac data.
15185 */
15186
15187 var ADTS_SAMPLING_FREQUENCIES = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
15188
15189 var parseId3TagSize = function parseId3TagSize(header, byteIndex) {
15190 var returnSize = header[byteIndex + 6] << 21 | header[byteIndex + 7] << 14 | header[byteIndex + 8] << 7 | header[byteIndex + 9],
15191 flags = header[byteIndex + 5],
15192 footerPresent = (flags & 16) >> 4; // if we get a negative returnSize clamp it to 0
15193
15194 returnSize = returnSize >= 0 ? returnSize : 0;
15195
15196 if (footerPresent) {
15197 return returnSize + 20;
15198 }
15199
15200 return returnSize + 10;
15201 };
15202
15203 var getId3Offset = function getId3Offset(data, offset) {
15204 if (data.length - offset < 10 || data[offset] !== 'I'.charCodeAt(0) || data[offset + 1] !== 'D'.charCodeAt(0) || data[offset + 2] !== '3'.charCodeAt(0)) {
15205 return offset;
15206 }
15207
15208 offset += parseId3TagSize(data, offset);
15209 return getId3Offset(data, offset);
15210 }; // TODO: use vhs-utils
15211
15212
15213 var isLikelyAacData$1 = function isLikelyAacData(data) {
15214 var offset = getId3Offset(data, 0);
15215 return data.length >= offset + 2 && (data[offset] & 0xFF) === 0xFF && (data[offset + 1] & 0xF0) === 0xF0 && // verify that the 2 layer bits are 0, aka this
15216 // is not mp3 data but aac data.
15217 (data[offset + 1] & 0x16) === 0x10;
15218 };
15219
15220 var parseSyncSafeInteger = function parseSyncSafeInteger(data) {
15221 return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
15222 }; // return a percent-encoded representation of the specified byte range
15223 // @see http://en.wikipedia.org/wiki/Percent-encoding
15224
15225
15226 var percentEncode = function percentEncode(bytes, start, end) {
15227 var i,
15228 result = '';
15229
15230 for (i = start; i < end; i++) {
15231 result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
15232 }
15233
15234 return result;
15235 }; // return the string representation of the specified byte range,
15236 // interpreted as ISO-8859-1.
15237
15238
15239 var parseIso88591 = function parseIso88591(bytes, start, end) {
15240 return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
15241 };
15242
15243 var parseAdtsSize = function parseAdtsSize(header, byteIndex) {
15244 var lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
15245 middle = header[byteIndex + 4] << 3,
15246 highTwo = (header[byteIndex + 3] & 0x3) << 11; // parenthesized: << binds tighter than &
15247 return highTwo | middle | lowThree;
15248 };
15249
15250 var parseType$2 = function parseType(header, byteIndex) {
15251 if (header[byteIndex] === 'I'.charCodeAt(0) && header[byteIndex + 1] === 'D'.charCodeAt(0) && header[byteIndex + 2] === '3'.charCodeAt(0)) {
15252 return 'timed-metadata';
15253 } else if ((header[byteIndex] & 0xff) === 0xff && (header[byteIndex + 1] & 0xf0) === 0xf0) {
15254 return 'audio';
15255 }
15256
15257 return null;
15258 };
15259
15260 var parseSampleRate = function parseSampleRate(packet) {
15261 var i = 0;
15262
15263 while (i + 5 < packet.length) {
15264 if (packet[i] !== 0xFF || (packet[i + 1] & 0xF6) !== 0xF0) {
15265 // If a valid header was not found, jump one forward and attempt to
15266 // find a valid ADTS header starting at the next byte
15267 i++;
15268 continue;
15269 }
15270
15271 return ADTS_SAMPLING_FREQUENCIES[(packet[i + 2] & 0x3c) >>> 2];
15272 }
15273
15274 return null;
15275 };
15276
15277 var parseAacTimestamp = function parseAacTimestamp(packet) {
15278 var frameStart, frameSize, frame, frameHeader; // find the start of the first frame and the end of the tag
15279
15280 frameStart = 10;
15281
15282 if (packet[5] & 0x40) {
15283 // advance the frame start past the extended header
15284 frameStart += 4; // header size field
15285
15286 frameStart += parseSyncSafeInteger(packet.subarray(10, 14));
15287 } // parse one or more ID3 frames
15288 // http://id3.org/id3v2.3.0#ID3v2_frame_overview
15289
15290
15291 do {
15292 // determine the number of bytes in this frame
15293 frameSize = parseSyncSafeInteger(packet.subarray(frameStart + 4, frameStart + 8));
15294
15295 if (frameSize < 1) {
15296 return null;
15297 }
15298
15299 frameHeader = String.fromCharCode(packet[frameStart], packet[frameStart + 1], packet[frameStart + 2], packet[frameStart + 3]);
15300
15301 if (frameHeader === 'PRIV') {
15302 frame = packet.subarray(frameStart + 10, frameStart + frameSize + 10);
15303
15304 for (var i = 0; i < frame.byteLength; i++) {
15305 if (frame[i] === 0) {
15306 var owner = parseIso88591(frame, 0, i);
15307
15308 if (owner === 'com.apple.streaming.transportStreamTimestamp') {
15309 var d = frame.subarray(i + 1);
15310 var size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
15311 size *= 4;
15312 size += d[7] & 0x03;
15313 return size;
15314 }
15315
15316 break;
15317 }
15318 }
15319 }
15320
15321 frameStart += 10; // advance past the frame header
15322
15323 frameStart += frameSize; // advance past the frame body
15324 } while (frameStart < packet.byteLength);
15325
15326 return null;
15327 };
15328
15329 var utils = {
15330 isLikelyAacData: isLikelyAacData$1,
15331 parseId3TagSize: parseId3TagSize,
15332 parseAdtsSize: parseAdtsSize,
15333 parseType: parseType$2,
15334 parseSampleRate: parseSampleRate,
15335 parseAacTimestamp: parseAacTimestamp
15336 };
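  // Editor's sketch: a bare ADTS header tests positive -- getId3Offset finds
  // no leading ID3 tag, the 0xFFFx syncword matches, and the layer bits are 0.
  var exampleLooksLikeAac = utils.isLikelyAacData(new Uint8Array([0xff, 0xf1, 0x50, 0x80, 0x01, 0x00, 0x00])); // === true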
15337
15338 var _AacStream;
15339 /**
15340 * Splits an incoming stream of binary data into ADTS and ID3 Frames.
15341 */
15342
15343
15344 _AacStream = function AacStream() {
15345 var everything = new Uint8Array(),
15346 timeStamp = 0;
15347
15348 _AacStream.prototype.init.call(this);
15349
15350 this.setTimestamp = function (timestamp) {
15351 timeStamp = timestamp;
15352 };
15353
15354 this.push = function (bytes) {
15355 var frameSize = 0,
15356 byteIndex = 0,
15357 bytesLeft,
15358 chunk,
15359 packet,
15360 tempLength; // If there are bytes remaining from the last segment, prepend them to the
15361 // bytes that were pushed in
15362
15363 if (everything.length) {
15364 tempLength = everything.length;
15365 var leftover = everything; // keep the buffered bytes while reallocating
15366 everything = new Uint8Array(bytes.byteLength + tempLength);
15367 everything.set(leftover.subarray(0, tempLength));
15368 everything.set(bytes, tempLength);
15368 } else {
15369 everything = bytes;
15370 }
15371
15372 while (everything.length - byteIndex >= 3) {
15373 if (everything[byteIndex] === 'I'.charCodeAt(0) && everything[byteIndex + 1] === 'D'.charCodeAt(0) && everything[byteIndex + 2] === '3'.charCodeAt(0)) {
15374 // Exit early because we don't have enough to parse
15375 // the ID3 tag header
15376 if (everything.length - byteIndex < 10) {
15377 break;
15378 } // check framesize
15379
15380
15381 frameSize = utils.parseId3TagSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
15382 // to emit a full packet
15383 // Add to byteIndex to support multiple ID3 tags in sequence
15384
15385 if (byteIndex + frameSize > everything.length) {
15386 break;
15387 }
15388
15389 chunk = {
15390 type: 'timed-metadata',
15391 data: everything.subarray(byteIndex, byteIndex + frameSize)
15392 };
15393 this.trigger('data', chunk);
15394 byteIndex += frameSize;
15395 continue;
15396 } else if ((everything[byteIndex] & 0xff) === 0xff && (everything[byteIndex + 1] & 0xf0) === 0xf0) {
15397 // Exit early because we don't have enough to parse
15398 // the ADTS frame header
15399 if (everything.length - byteIndex < 7) {
15400 break;
15401 }
15402
15403 frameSize = utils.parseAdtsSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
15404 // to emit a full packet
15405
15406 if (byteIndex + frameSize > everything.length) {
15407 break;
15408 }
15409
15410 packet = {
15411 type: 'audio',
15412 data: everything.subarray(byteIndex, byteIndex + frameSize),
15413 pts: timeStamp,
15414 dts: timeStamp
15415 };
15416 this.trigger('data', packet);
15417 byteIndex += frameSize;
15418 continue;
15419 }
15420
15421 byteIndex++;
15422 }
15423
15424 bytesLeft = everything.length - byteIndex;
15425
15426 if (bytesLeft > 0) {
15427 everything = everything.subarray(byteIndex);
15428 } else {
15429 everything = new Uint8Array();
15430 }
15431 };
15432
15433 this.reset = function () {
15434 everything = new Uint8Array();
15435 this.trigger('reset');
15436 };
15437
15438 this.endTimeline = function () {
15439 everything = new Uint8Array();
15440 this.trigger('endedtimeline');
15441 };
15442 };
15443
15444 _AacStream.prototype = new stream();
15445 var aac = _AacStream; // constants
15446
15447 var AUDIO_PROPERTIES = ['audioobjecttype', 'channelcount', 'samplerate', 'samplingfrequencyindex', 'samplesize'];
15448 var audioProperties = AUDIO_PROPERTIES;
15449 var VIDEO_PROPERTIES = ['width', 'height', 'profileIdc', 'levelIdc', 'profileCompatibility', 'sarRatio'];
15450 var videoProperties = VIDEO_PROPERTIES;
15451 var H264Stream = h264.H264Stream;
15452 var isLikelyAacData = utils.isLikelyAacData;
15453 var ONE_SECOND_IN_TS$1 = clock.ONE_SECOND_IN_TS; // object types
15454
15455 var _VideoSegmentStream, _AudioSegmentStream, _Transmuxer, _CoalesceStream;
15456
15457 var retriggerForStream = function retriggerForStream(key, event) {
15458 event.stream = key;
15459 this.trigger('log', event);
15460 };
15461
15462 var addPipelineLogRetriggers = function addPipelineLogRetriggers(transmuxer, pipeline) {
15463 var keys = Object.keys(pipeline);
15464
15465 for (var i = 0; i < keys.length; i++) {
15466 var key = keys[i]; // skip non-stream keys and headOfPipeline
15467 // which is just a duplicate
15468
15469 if (key === 'headOfPipeline' || !pipeline[key].on) {
15470 continue;
15471 }
15472
15473 pipeline[key].on('log', retriggerForStream.bind(transmuxer, key));
15474 }
15475 };
15476 /**
15477 * Compare two arrays (even typed) for same-ness
15478 */
15479
15480
15481 var arrayEquals = function arrayEquals(a, b) {
15482 var i;
15483
15484 if (a.length !== b.length) {
15485 return false;
15486 } // compare the value of each element in the array
15487
15488
15489 for (i = 0; i < a.length; i++) {
15490 if (a[i] !== b[i]) {
15491 return false;
15492 }
15493 }
15494
15495 return true;
15496 };
15497
15498 var generateSegmentTimingInfo = function generateSegmentTimingInfo(baseMediaDecodeTime, startDts, startPts, endDts, endPts, prependedContentDuration) {
15499 var ptsOffsetFromDts = startPts - startDts,
15500 decodeDuration = endDts - startDts,
15501 presentationDuration = endPts - startPts; // The PTS and DTS values are based on the actual stream times from the segment,
15502 // however, the player time values will reflect a start from the baseMediaDecodeTime.
15503 // In order to provide relevant values for the player times, base timing info on the
15504 // baseMediaDecodeTime and the DTS and PTS durations of the segment.
15505
15506 return {
15507 start: {
15508 dts: baseMediaDecodeTime,
15509 pts: baseMediaDecodeTime + ptsOffsetFromDts
15510 },
15511 end: {
15512 dts: baseMediaDecodeTime + decodeDuration,
15513 pts: baseMediaDecodeTime + presentationDuration
15514 },
15515 prependedContentDuration: prependedContentDuration,
15516 baseMediaDecodeTime: baseMediaDecodeTime
15517 };
15518 };
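  // Editor's sketch: with baseMediaDecodeTime 0, a segment whose stream times
  // run from dts 90000 / pts 93000 to dts 180000 / pts 183000 is rebased to
  // start { dts: 0, pts: 3000 } and end { dts: 90000, pts: 93000 } -- the
  // PTS/DTS offset and the durations survive, only the origin moves.
  var exampleTimingInfo = generateSegmentTimingInfo(0, 90000, 93000, 180000, 183000, 0);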
15519 /**
15520 * Constructs a single-track, ISO BMFF media segment from AAC data
15521 * events. The output of this stream can be fed to a SourceBuffer
15522 * configured with a suitable initialization segment.
15523 * @param track {object} track metadata configuration
15524 * @param options {object} transmuxer options object
15525 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
15526 * in the source; false to adjust the first segment to start at 0.
15527 */
15528
15529
15530 _AudioSegmentStream = function AudioSegmentStream(track, options) {
15531 var adtsFrames = [],
15532 sequenceNumber,
15533 earliestAllowedDts = 0,
15534 audioAppendStartTs = 0,
15535 videoBaseMediaDecodeTime = Infinity;
15536 options = options || {};
15537 sequenceNumber = options.firstSequenceNumber || 0;
15538
15539 _AudioSegmentStream.prototype.init.call(this);
15540
15541 this.push = function (data) {
15542 trackDecodeInfo.collectDtsInfo(track, data);
15543
15544 if (track) {
15545 audioProperties.forEach(function (prop) {
15546 track[prop] = data[prop];
15547 });
15548 } // buffer audio data until end() is called
15549
15550
15551 adtsFrames.push(data);
15552 };
15553
15554 this.setEarliestDts = function (earliestDts) {
15555 earliestAllowedDts = earliestDts;
15556 };
15557
15558 this.setVideoBaseMediaDecodeTime = function (baseMediaDecodeTime) {
15559 videoBaseMediaDecodeTime = baseMediaDecodeTime;
15560 };
15561
15562 this.setAudioAppendStart = function (timestamp) {
15563 audioAppendStartTs = timestamp;
15564 };
15565
15566 this.flush = function () {
15567 var frames, moof, mdat, boxes, frameDuration, segmentDuration, videoClockCyclesOfSilencePrefixed; // return early if no audio data has been observed
15568
15569 if (adtsFrames.length === 0) {
15570 this.trigger('done', 'AudioSegmentStream');
15571 return;
15572 }
15573
15574 frames = audioFrameUtils.trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts);
15575 track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps); // the amount of silence prefixed to the audio, measured in the video clock rather than the audio clock
15576
15577 videoClockCyclesOfSilencePrefixed = audioFrameUtils.prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime); // we have to build the index from byte locations to
15578 // samples (that is, adts frames) in the audio data
15579
15580 track.samples = audioFrameUtils.generateSampleTable(frames); // concatenate the audio data to construct the mdat
15581
15582 mdat = mp4Generator.mdat(audioFrameUtils.concatenateFrameData(frames));
15583 adtsFrames = [];
15584 moof = mp4Generator.moof(sequenceNumber, [track]);
15585 boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // bump the sequence number for next time
15586
15587 sequenceNumber++;
15588 boxes.set(moof);
15589 boxes.set(mdat, moof.byteLength);
15590 trackDecodeInfo.clearDtsInfo(track);
15591 frameDuration = Math.ceil(ONE_SECOND_IN_TS$1 * 1024 / track.samplerate); // TODO this check was added to maintain backwards compatibility (particularly with
15592 // tests) on adding the timingInfo event. However, it seems unlikely that there's a
15593 // valid use-case where an init segment/data should be triggered without associated
15594 // frames. Leaving for now, but should be looked into.
15595
15596 if (frames.length) {
15597 segmentDuration = frames.length * frameDuration;
15598 this.trigger('segmentTimingInfo', generateSegmentTimingInfo( // The audio track's baseMediaDecodeTime is in audio clock cycles, but the
15599 // frame info is in video clock cycles. Convert to match expectation of
15600 // listeners (that all timestamps will be based on video clock cycles).
15601 clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate), // frame times are already in video clock, as is segment duration
15602 frames[0].dts, frames[0].pts, frames[0].dts + segmentDuration, frames[0].pts + segmentDuration, videoClockCyclesOfSilencePrefixed || 0));
15603 this.trigger('timingInfo', {
15604 start: frames[0].pts,
15605 end: frames[0].pts + segmentDuration
15606 });
15607 }
15608
15609 this.trigger('data', {
15610 track: track,
15611 boxes: boxes
15612 });
15613 this.trigger('done', 'AudioSegmentStream');
15614 };
15615
15616 this.reset = function () {
15617 trackDecodeInfo.clearDtsInfo(track);
15618 adtsFrames = [];
15619 this.trigger('reset');
15620 };
15621 };
15622
15623 _AudioSegmentStream.prototype = new stream();
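// Usage sketch for the stream defined above (illustrative only; `frames`
// is assumed to be an array of parsed ADTS frame objects like those the
// ADTS parsing stream emits, and the track metadata values are made up):
//
//   var audioSegments = new _AudioSegmentStream({
//     type: 'audio',
//     codec: 'adts',
//     samplerate: 44100,
//     timelineStartInfo: { baseMediaDecodeTime: 0 }
//   }, { keepOriginalTimestamps: false });
//   audioSegments.on('data', function (segment) {
//     // segment.boxes is a Uint8Array holding a moof followed by an mdat
//   });
//   frames.forEach(function (frame) { audioSegments.push(frame); });
//   audioSegments.flush();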
15624 /**
15625 * Constructs a single-track, ISO BMFF media segment from H264 data
15626 * events. The output of this stream can be fed to a SourceBuffer
15627 * configured with a suitable initialization segment.
15628 * @param track {object} track metadata configuration
15629 * @param options {object} transmuxer options object
15630 * @param options.alignGopsAtEnd {boolean} If true, start from the end of the
15631 * gopsToAlignWith list when attempting to align gop pts
15632 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
15633 * in the source; false to adjust the first segment to start at 0.
15634 */
15635
15636 _VideoSegmentStream = function VideoSegmentStream(track, options) {
15637 var sequenceNumber,
15638 nalUnits = [],
15639 gopsToAlignWith = [],
15640 config,
15641 pps;
15642 options = options || {};
15643 sequenceNumber = options.firstSequenceNumber || 0;
15644
15645 _VideoSegmentStream.prototype.init.call(this);
15646
15647 delete track.minPTS;
15648 this.gopCache_ = [];
15649 /**
15650 * Constructs an ISO BMFF segment given H264 nalUnits
15651 * @param {Object} nalUnit A data event representing a nalUnit
15652 * @param {String} nalUnit.nalUnitType
15653 * @param {Object} nalUnit.config Properties for a mp4 track
15654 * @param {Uint8Array} nalUnit.data The nalUnit bytes
15655 * @see lib/codecs/h264.js
15656 **/
15657
15658 this.push = function (nalUnit) {
15659 trackDecodeInfo.collectDtsInfo(track, nalUnit); // record the track config
15660
15661 if (nalUnit.nalUnitType === 'seq_parameter_set_rbsp' && !config) {
15662 config = nalUnit.config;
15663 track.sps = [nalUnit.data];
15664 videoProperties.forEach(function (prop) {
15665 track[prop] = config[prop];
15666 }, this);
15667 }
15668
15669 if (nalUnit.nalUnitType === 'pic_parameter_set_rbsp' && !pps) {
15670 pps = nalUnit.data;
15671 track.pps = [nalUnit.data];
15672 } // buffer video until flush() is called
15673
15674
15675 nalUnits.push(nalUnit);
15676 };
15677 /**
15678 * Pass constructed ISO BMFF track and boxes on to the
15679 * next stream in the pipeline
15680 **/
15681
15682
15683 this.flush = function () {
15684 var frames,
15685 gopForFusion,
15686 gops,
15687 moof,
15688 mdat,
15689 boxes,
15690 prependedContentDuration = 0,
15691 firstGop,
15692 lastGop; // Throw away nalUnits at the start of the byte stream until
15693 // we find the first AUD
15694
15695 while (nalUnits.length) {
15696 if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
15697 break;
15698 }
15699
15700 nalUnits.shift();
15701 } // Return early if no video data has been observed
15702
15703
15704 if (nalUnits.length === 0) {
15705 this.resetStream_();
15706 this.trigger('done', 'VideoSegmentStream');
15707 return;
15708 } // Organize the raw nal-units into arrays that represent
15709 // higher-level constructs such as frames and gops
15710 // (group-of-pictures)
15711
15712
15713 frames = frameUtils.groupNalsIntoFrames(nalUnits);
15714 gops = frameUtils.groupFramesIntoGops(frames); // If the first frame of this fragment is not a keyframe we have
15715 // a problem since MSE (on Chrome) requires a leading keyframe.
15716 //
15717 // We have two approaches to repairing this situation:
15718 // 1) GOP-FUSION:
15719 // This is where we keep track of the GOPS (group-of-pictures)
15720 // from previous fragments and attempt to find one that we can
15721 // prepend to the current fragment in order to create a valid
15722 // fragment.
15723 // 2) KEYFRAME-PULLING:
15724 // Here we search for the first keyframe in the fragment and
15725 // throw away all the frames between the start of the fragment
15726 // and that keyframe. We then extend the duration and pull the
15727 // PTS of the keyframe forward so that it covers the time range
15728 // of the frames that were disposed of.
15729 //
15730 // #1 is far preferable to #2 (which can cause "stuttering"), but
15731 // it requires more things to be just right.
15732
15733 if (!gops[0][0].keyFrame) {
15734 // Search for a gop for fusion from our gopCache
15735 gopForFusion = this.getGopForFusion_(nalUnits[0], track);
15736
15737 if (gopForFusion) {
15738 // in order to provide more accurate timing information about the segment, save
15739 // the number of seconds prepended to the original segment due to GOP fusion
15740 prependedContentDuration = gopForFusion.duration;
15741 gops.unshift(gopForFusion); // Adjust Gops' metadata to account for the inclusion of the
15742 // new gop at the beginning
15743
15744 gops.byteLength += gopForFusion.byteLength;
15745 gops.nalCount += gopForFusion.nalCount;
15746 gops.pts = gopForFusion.pts;
15747 gops.dts = gopForFusion.dts;
15748 gops.duration += gopForFusion.duration;
15749 } else {
15750 // If we didn't find a candidate gop fall back to keyframe-pulling
15751 gops = frameUtils.extendFirstKeyFrame(gops);
15752 }
15753 } // Trim gops to align with gopsToAlignWith
15754
15755
15756 if (gopsToAlignWith.length) {
15757 var alignedGops;
15758
15759 if (options.alignGopsAtEnd) {
15760 alignedGops = this.alignGopsAtEnd_(gops);
15761 } else {
15762 alignedGops = this.alignGopsAtStart_(gops);
15763 }
15764
15765 if (!alignedGops) {
15766 // save all the nals in the last GOP into the gop cache
15767 this.gopCache_.unshift({
15768 gop: gops.pop(),
15769 pps: track.pps,
15770 sps: track.sps
15771 }); // Keep a maximum of 6 GOPs in the cache
15772
15773 this.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnits
15774
15775 nalUnits = []; // return early; no gops could be aligned with the desired gopsToAlignWith
15776
15777 this.resetStream_();
15778 this.trigger('done', 'VideoSegmentStream');
15779 return;
15780 } // Some gops were trimmed. clear dts info so minSegmentDts and pts are correct
15781 // when recalculated before sending off to CoalesceStream
15782
15783
15784 trackDecodeInfo.clearDtsInfo(track);
15785 gops = alignedGops;
15786 }
15787
15788 trackDecodeInfo.collectDtsInfo(track, gops); // First, we have to build the index from byte locations to
15789 // samples (that is, frames) in the video data
15790
15791 track.samples = frameUtils.generateSampleTable(gops); // Concatenate the video data and construct the mdat
15792
15793 mdat = mp4Generator.mdat(frameUtils.concatenateNalData(gops));
15794 track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps);
15795 this.trigger('processedGopsInfo', gops.map(function (gop) {
15796 return {
15797 pts: gop.pts,
15798 dts: gop.dts,
15799 byteLength: gop.byteLength
15800 };
15801 }));
15802 firstGop = gops[0];
15803 lastGop = gops[gops.length - 1];
15804 this.trigger('segmentTimingInfo', generateSegmentTimingInfo(track.baseMediaDecodeTime, firstGop.dts, firstGop.pts, lastGop.dts + lastGop.duration, lastGop.pts + lastGop.duration, prependedContentDuration));
15805 this.trigger('timingInfo', {
15806 start: gops[0].pts,
15807 end: gops[gops.length - 1].pts + gops[gops.length - 1].duration
15808 }); // save all the nals in the last GOP into the gop cache
15809
15810 this.gopCache_.unshift({
15811 gop: gops.pop(),
15812 pps: track.pps,
15813 sps: track.sps
15814 }); // Keep a maximum of 6 GOPs in the cache
15815
15816 this.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnits
15817
15818 nalUnits = [];
15819 this.trigger('baseMediaDecodeTime', track.baseMediaDecodeTime);
15820 this.trigger('timelineStartInfo', track.timelineStartInfo);
15821 moof = mp4Generator.moof(sequenceNumber, [track]); // it would be great to allocate this array up front instead of
15822 // throwing away hundreds of media segment fragments
15823
15824 boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // Bump the sequence number for next time
15825
15826 sequenceNumber++;
15827 boxes.set(moof);
15828 boxes.set(mdat, moof.byteLength);
15829 this.trigger('data', {
15830 track: track,
15831 boxes: boxes
15832 });
15833 this.resetStream_(); // Continue with the flush process now
15834
15835 this.trigger('done', 'VideoSegmentStream');
15836 };
15837
15838 this.reset = function () {
15839 this.resetStream_();
15840 nalUnits = [];
15841 this.gopCache_.length = 0;
15842 gopsToAlignWith.length = 0;
15843 this.trigger('reset');
15844 };
15845
15846 this.resetStream_ = function () {
15847 trackDecodeInfo.clearDtsInfo(track); // reset config and pps because they may differ across segments
15848 // for instance, when we are rendition switching
15849
15850 config = undefined;
15851 pps = undefined;
15852 }; // Search for a candidate Gop for gop-fusion from the gop cache and
15853 // return it or return null if no good candidate was found
15854
15855
15856 this.getGopForFusion_ = function (nalUnit) {
15857 var halfSecond = 45000,
15858 // Half a second in a 90kHz clock
15859 allowableOverlap = 10000,
15860 // About 3 frames @ 30fps
15861 nearestDistance = Infinity,
15862 dtsDistance,
15863 nearestGopObj,
15864 currentGop,
15865 currentGopObj,
15866 i; // Search for the GOP nearest to the beginning of this nal unit
15867
15868 for (i = 0; i < this.gopCache_.length; i++) {
15869 currentGopObj = this.gopCache_[i];
15870 currentGop = currentGopObj.gop; // Reject Gops with different SPS or PPS
15871
15872 if (!(track.pps && arrayEquals(track.pps[0], currentGopObj.pps[0])) || !(track.sps && arrayEquals(track.sps[0], currentGopObj.sps[0]))) {
15873 continue;
15874 } // Reject Gops that would require a negative baseMediaDecodeTime
15875
15876
15877 if (currentGop.dts < track.timelineStartInfo.dts) {
15878 continue;
15879 } // The distance between the end of the gop and the start of the nalUnit
15880
15881
15882 dtsDistance = nalUnit.dts - currentGop.dts - currentGop.duration; // Only consider GOPS that start before the nal unit and end within
15883 // a half-second of the nal unit
15884
15885 if (dtsDistance >= -allowableOverlap && dtsDistance <= halfSecond) {
15886 // Always use the closest GOP we found if there is more than
15887 // one candidate
15888 if (!nearestGopObj || nearestDistance > dtsDistance) {
15889 nearestGopObj = currentGopObj;
15890 nearestDistance = dtsDistance;
15891 }
15892 }
15893 }
15894
15895 if (nearestGopObj) {
15896 return nearestGopObj.gop;
15897 }
15898
15899 return null;
15900 }; // trim gop list to the first gop found that has a matching pts with a gop in the list
15901 // of gopsToAlignWith starting from the START of the list
15902
15903
15904 this.alignGopsAtStart_ = function (gops) {
15905 var alignIndex, gopIndex, align, gop, byteLength, nalCount, duration, alignedGops;
15906 byteLength = gops.byteLength;
15907 nalCount = gops.nalCount;
15908 duration = gops.duration;
15909 alignIndex = gopIndex = 0;
15910
15911 while (alignIndex < gopsToAlignWith.length && gopIndex < gops.length) {
15912 align = gopsToAlignWith[alignIndex];
15913 gop = gops[gopIndex];
15914
15915 if (align.pts === gop.pts) {
15916 break;
15917 }
15918
15919 if (gop.pts > align.pts) {
15920 // this current gop starts after the current gop we want to align on, so increment
15921 // align index
15922 alignIndex++;
15923 continue;
15924 } // current gop starts before the current gop we want to align on. so increment gop
15925 // index
15926
15927
15928 gopIndex++;
15929 byteLength -= gop.byteLength;
15930 nalCount -= gop.nalCount;
15931 duration -= gop.duration;
15932 }
15933
15934 if (gopIndex === 0) {
15935 // no gops to trim
15936 return gops;
15937 }
15938
15939 if (gopIndex === gops.length) {
15940 // all gops trimmed, skip appending all gops
15941 return null;
15942 }
15943
15944 alignedGops = gops.slice(gopIndex);
15945 alignedGops.byteLength = byteLength;
15946 alignedGops.duration = duration;
15947 alignedGops.nalCount = nalCount;
15948 alignedGops.pts = alignedGops[0].pts;
15949 alignedGops.dts = alignedGops[0].dts;
15950 return alignedGops;
15951 }; // trim gop list to the first gop found that has a matching pts with a gop in the list
15952 // of gopsToAlignWith starting from the END of the list
15953
15954
15955 this.alignGopsAtEnd_ = function (gops) {
15956 var alignIndex, gopIndex, align, gop, alignEndIndex, matchFound;
15957 alignIndex = gopsToAlignWith.length - 1;
15958 gopIndex = gops.length - 1;
15959 alignEndIndex = null;
15960 matchFound = false;
15961
15962 while (alignIndex >= 0 && gopIndex >= 0) {
15963 align = gopsToAlignWith[alignIndex];
15964 gop = gops[gopIndex];
15965
15966 if (align.pts === gop.pts) {
15967 matchFound = true;
15968 break;
15969 }
15970
15971 if (align.pts > gop.pts) {
15972 alignIndex--;
15973 continue;
15974 }
15975
15976 if (alignIndex === gopsToAlignWith.length - 1) {
15977 // gop.pts is greater than the last alignment candidate. If no match is found
15978 // by the end of this loop, we still want to append gops that come after this
15979 // point
15980 alignEndIndex = gopIndex;
15981 }
15982
15983 gopIndex--;
15984 }
15985
15986 if (!matchFound && alignEndIndex === null) {
15987 return null;
15988 }
15989
15990 var trimIndex;
15991
15992 if (matchFound) {
15993 trimIndex = gopIndex;
15994 } else {
15995 trimIndex = alignEndIndex;
15996 }
15997
15998 if (trimIndex === 0) {
15999 return gops;
16000 }
16001
16002 var alignedGops = gops.slice(trimIndex);
16003 var metadata = alignedGops.reduce(function (total, gop) {
16004 total.byteLength += gop.byteLength;
16005 total.duration += gop.duration;
16006 total.nalCount += gop.nalCount;
16007 return total;
16008 }, {
16009 byteLength: 0,
16010 duration: 0,
16011 nalCount: 0
16012 });
16013 alignedGops.byteLength = metadata.byteLength;
16014 alignedGops.duration = metadata.duration;
16015 alignedGops.nalCount = metadata.nalCount;
16016 alignedGops.pts = alignedGops[0].pts;
16017 alignedGops.dts = alignedGops[0].dts;
16018 return alignedGops;
16019 };
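// Worked example for the two alignment helpers above (pts values are
// illustrative): with gopsToAlignWith pts of [20, 30] and incoming gop
// pts of [10, 20, 30], alignGopsAtStart_ trims the gop at pts 10 and
// returns the gops at [20, 30] with byteLength, nalCount and duration
// recalculated, while alignGopsAtEnd_ matches from the tail at pts 30
// and returns only the gop at [30]. If no pts lines up at all, both
// helpers return null and flush() abandons the fragment.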
16020
16021 this.alignGopsWith = function (newGopsToAlignWith) {
16022 gopsToAlignWith = newGopsToAlignWith;
16023 };
16024 };
16025
16026 _VideoSegmentStream.prototype = new stream();
16027 /**
16028 * A Stream that can combine multiple streams (i.e. audio & video)
16029 * into a single output segment for MSE. Also supports audio-only
16030 * and video-only streams.
16031 * @param options {object} transmuxer options object
16032 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
16033 * in the source; false to adjust the first segment to start at media timeline start.
16034 */
16035
16036 _CoalesceStream = function CoalesceStream(options, metadataStream) {
16037 // Number of Tracks per output segment
16038 // If greater than 1, we combine multiple
16039 // tracks into a single segment
16040 this.numberOfTracks = 0;
16041 this.metadataStream = metadataStream;
16042 options = options || {};
16043
16044 if (typeof options.remux !== 'undefined') {
16045 this.remuxTracks = !!options.remux;
16046 } else {
16047 this.remuxTracks = true;
16048 }
16049
16050 if (typeof options.keepOriginalTimestamps === 'boolean') {
16051 this.keepOriginalTimestamps = options.keepOriginalTimestamps;
16052 } else {
16053 this.keepOriginalTimestamps = false;
16054 }
16055
16056 this.pendingTracks = [];
16057 this.videoTrack = null;
16058 this.pendingBoxes = [];
16059 this.pendingCaptions = [];
16060 this.pendingMetadata = [];
16061 this.pendingBytes = 0;
16062 this.emittedTracks = 0;
16063
16064 _CoalesceStream.prototype.init.call(this); // Take output from multiple
16065
16066
16067 this.push = function (output) {
16068 // buffer incoming captions until the associated video segment
16069 // finishes
16070 if (output.text) {
16071 return this.pendingCaptions.push(output);
16072 } // buffer incoming id3 tags until the final flush
16073
16074
16075 if (output.frames) {
16076 return this.pendingMetadata.push(output);
16077 } // Add this track to the list of pending tracks and store
16078 // important information required for the construction of
16079 // the final segment
16080
16081
16082 this.pendingTracks.push(output.track);
16083 this.pendingBytes += output.boxes.byteLength; // TODO: is there an issue for this against chrome?
16084 // We unshift audio and push video because
16085 // as of Chrome 75 when switching from
16086 // one init segment to another if the video
16087 // mdat does not appear after the audio mdat
16088 // only audio will play for the duration of our transmux.
16089
16090 if (output.track.type === 'video') {
16091 this.videoTrack = output.track;
16092 this.pendingBoxes.push(output.boxes);
16093 }
16094
16095 if (output.track.type === 'audio') {
16096 this.audioTrack = output.track;
16097 this.pendingBoxes.unshift(output.boxes);
16098 }
16099 };
16100 };
16101
16102 _CoalesceStream.prototype = new stream();
16103
16104 _CoalesceStream.prototype.flush = function (flushSource) {
16105 var offset = 0,
16106 event = {
16107 captions: [],
16108 captionStreams: {},
16109 metadata: [],
16110 info: {}
16111 },
16112 caption,
16113 id3,
16114 initSegment,
16115 timelineStartPts = 0,
16116 i;
16117
16118 if (this.pendingTracks.length < this.numberOfTracks) {
16119 if (flushSource !== 'VideoSegmentStream' && flushSource !== 'AudioSegmentStream') {
16120 // Return because we haven't received a flush from a data-generating
16121 // portion of the segment (meaning that we have only received metadata
16122 // or captions.)
16123 return;
16124 } else if (this.remuxTracks) {
16125 // Return until we have enough tracks from the pipeline to remux (if we
16126 // are remuxing audio and video into a single MP4)
16127 return;
16128 } else if (this.pendingTracks.length === 0) {
16129 // In the case where we receive a flush without any data having been
16130 // received we consider it an emitted track for the purposes of coalescing
16131 // `done` events.
16132 // We do this for the case where there is an audio and video track in the
16133 // segment but no audio data. (seen in several playlists with alternate
16134 // audio tracks and no audio present in the main TS segments.)
16135 this.emittedTracks++;
16136
16137 if (this.emittedTracks >= this.numberOfTracks) {
16138 this.trigger('done');
16139 this.emittedTracks = 0;
16140 }
16141
16142 return;
16143 }
16144 }
16145
16146 if (this.videoTrack) {
16147 timelineStartPts = this.videoTrack.timelineStartInfo.pts;
16148 videoProperties.forEach(function (prop) {
16149 event.info[prop] = this.videoTrack[prop];
16150 }, this);
16151 } else if (this.audioTrack) {
16152 timelineStartPts = this.audioTrack.timelineStartInfo.pts;
16153 audioProperties.forEach(function (prop) {
16154 event.info[prop] = this.audioTrack[prop];
16155 }, this);
16156 }
16157
16158 if (this.videoTrack || this.audioTrack) {
16159 if (this.pendingTracks.length === 1) {
16160 event.type = this.pendingTracks[0].type;
16161 } else {
16162 event.type = 'combined';
16163 }
16164
16165 this.emittedTracks += this.pendingTracks.length;
16166 initSegment = mp4Generator.initSegment(this.pendingTracks); // Create a new typed array to hold the init segment
16167
16168 event.initSegment = new Uint8Array(initSegment.byteLength); // Create an init segment containing a moov
16169 // and track definitions
16170
16171 event.initSegment.set(initSegment); // Create a new typed array to hold the moof+mdats
16172
16173 event.data = new Uint8Array(this.pendingBytes); // Append each moof+mdat (one per track) together
16174
16175 for (i = 0; i < this.pendingBoxes.length; i++) {
16176 event.data.set(this.pendingBoxes[i], offset);
16177 offset += this.pendingBoxes[i].byteLength;
16178 } // Translate caption PTS times into second offsets to match the
16179 // video timeline for the segment, and add track info
16180
16181
16182 for (i = 0; i < this.pendingCaptions.length; i++) {
16183 caption = this.pendingCaptions[i];
16184 caption.startTime = clock.metadataTsToSeconds(caption.startPts, timelineStartPts, this.keepOriginalTimestamps);
16185 caption.endTime = clock.metadataTsToSeconds(caption.endPts, timelineStartPts, this.keepOriginalTimestamps);
16186 event.captionStreams[caption.stream] = true;
16187 event.captions.push(caption);
16188 } // Translate ID3 frame PTS times into second offsets to match the
16189 // video timeline for the segment
16190
16191
16192 for (i = 0; i < this.pendingMetadata.length; i++) {
16193 id3 = this.pendingMetadata[i];
16194 id3.cueTime = clock.metadataTsToSeconds(id3.pts, timelineStartPts, this.keepOriginalTimestamps);
16195 event.metadata.push(id3);
16196 } // We add this to every single emitted segment even though we only need
16197 // it for the first
16198
16199
16200 event.metadata.dispatchType = this.metadataStream.dispatchType; // Reset stream state
16201
16202 this.pendingTracks.length = 0;
16203 this.videoTrack = null;
16204 this.pendingBoxes.length = 0;
16205 this.pendingCaptions.length = 0;
16206 this.pendingBytes = 0;
16207 this.pendingMetadata.length = 0; // Emit the built segment
16208 // We include captions and ID3 tags for backwards compatibility,
16209 // ideally we should send only video and audio in the data event
16210
16211 this.trigger('data', event); // Emit each caption to the outside world
16212 // Ideally, this would happen immediately on parsing captions,
16213 // but we need to ensure that video data is sent back first
16214 // so that caption timing can be adjusted to match video timing
16215
16216 for (i = 0; i < event.captions.length; i++) {
16217 caption = event.captions[i];
16218 this.trigger('caption', caption);
16219 } // Emit each id3 tag to the outside world
16220 // Ideally, this would happen immediately on parsing the tag,
16221 // but we need to ensure that video data is sent back first
16222 // so that ID3 frame timing can be adjusted to match video timing
16223
16224
16225 for (i = 0; i < event.metadata.length; i++) {
16226 id3 = event.metadata[i];
16227 this.trigger('id3Frame', id3);
16228 }
16229 } // Only emit `done` if all tracks have been flushed and emitted
16230
16231
16232 if (this.emittedTracks >= this.numberOfTracks) {
16233 this.trigger('done');
16234 this.emittedTracks = 0;
16235 }
16236 };
16237
16238 _CoalesceStream.prototype.setRemux = function (val) {
16239 this.remuxTracks = val;
16240 };
16241 /**
16242 * A Stream that expects MP2T binary data as input and produces
16243 * corresponding media segments, suitable for use with Media Source
16244 * Extension (MSE) implementations that support the ISO BMFF byte
16245 * stream format, like Chrome.
16246 */
16247
16248
16249 _Transmuxer = function Transmuxer(options) {
16250 var self = this,
16251 hasFlushed = true,
16252 videoTrack,
16253 audioTrack;
16254
16255 _Transmuxer.prototype.init.call(this);
16256
16257 options = options || {};
16258 this.baseMediaDecodeTime = options.baseMediaDecodeTime || 0;
16259 this.transmuxPipeline_ = {};
16260
16261 this.setupAacPipeline = function () {
16262 var pipeline = {};
16263 this.transmuxPipeline_ = pipeline;
16264 pipeline.type = 'aac';
16265 pipeline.metadataStream = new m2ts_1.MetadataStream(); // set up the parsing pipeline
16266
16267 pipeline.aacStream = new aac();
16268 pipeline.audioTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('audio');
16269 pipeline.timedMetadataTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('timed-metadata');
16270 pipeline.adtsStream = new adts();
16271 pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
16272 pipeline.headOfPipeline = pipeline.aacStream;
16273 pipeline.aacStream.pipe(pipeline.audioTimestampRolloverStream).pipe(pipeline.adtsStream);
16274 pipeline.aacStream.pipe(pipeline.timedMetadataTimestampRolloverStream).pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream);
16275 pipeline.metadataStream.on('timestamp', function (frame) {
16276 pipeline.aacStream.setTimestamp(frame.timeStamp);
16277 });
16278 pipeline.aacStream.on('data', function (data) {
16279 if (data.type !== 'timed-metadata' && data.type !== 'audio' || pipeline.audioSegmentStream) {
16280 return;
16281 }
16282
16283 audioTrack = audioTrack || {
16284 timelineStartInfo: {
16285 baseMediaDecodeTime: self.baseMediaDecodeTime
16286 },
16287 codec: 'adts',
16288 type: 'audio'
16289 }; // hook up the audio segment stream to the first track with aac data
16290
16291 pipeline.coalesceStream.numberOfTracks++;
16292 pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
16293 pipeline.audioSegmentStream.on('log', self.getLogTrigger_('audioSegmentStream'));
16294 pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo')); // Set up the final part of the audio pipeline
16295
16296 pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream); // emit pmt info
16297
16298 self.trigger('trackinfo', {
16299 hasAudio: !!audioTrack,
16300 hasVideo: !!videoTrack
16301 });
16302 }); // Re-emit any data coming from the coalesce stream to the outside world
16303
16304 pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data')); // Let the consumer know we have finished flushing the entire pipeline
16305
16306 pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
16307 addPipelineLogRetriggers(this, pipeline);
16308 };
16309
16310 this.setupTsPipeline = function () {
16311 var pipeline = {};
16312 this.transmuxPipeline_ = pipeline;
16313 pipeline.type = 'ts';
16314 pipeline.metadataStream = new m2ts_1.MetadataStream(); // set up the parsing pipeline
16315
16316 pipeline.packetStream = new m2ts_1.TransportPacketStream();
16317 pipeline.parseStream = new m2ts_1.TransportParseStream();
16318 pipeline.elementaryStream = new m2ts_1.ElementaryStream();
16319 pipeline.timestampRolloverStream = new m2ts_1.TimestampRolloverStream();
16320 pipeline.adtsStream = new adts();
16321 pipeline.h264Stream = new H264Stream();
16322 pipeline.captionStream = new m2ts_1.CaptionStream(options);
16323 pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
16324 pipeline.headOfPipeline = pipeline.packetStream; // disassemble MPEG2-TS packets into elementary streams
16325
16326 pipeline.packetStream.pipe(pipeline.parseStream).pipe(pipeline.elementaryStream).pipe(pipeline.timestampRolloverStream); // !!THIS ORDER IS IMPORTANT!!
16327 // demux the streams
16328
16329 pipeline.timestampRolloverStream.pipe(pipeline.h264Stream);
16330 pipeline.timestampRolloverStream.pipe(pipeline.adtsStream);
16331 pipeline.timestampRolloverStream.pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream); // Hook up CEA-608/708 caption stream
16332
16333 pipeline.h264Stream.pipe(pipeline.captionStream).pipe(pipeline.coalesceStream);
16334 pipeline.elementaryStream.on('data', function (data) {
16335 var i;
16336
16337 if (data.type === 'metadata') {
16338 i = data.tracks.length; // scan the tracks listed in the metadata
16339
16340 while (i--) {
16341 if (!videoTrack && data.tracks[i].type === 'video') {
16342 videoTrack = data.tracks[i];
16343 videoTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
16344 } else if (!audioTrack && data.tracks[i].type === 'audio') {
16345 audioTrack = data.tracks[i];
16346 audioTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
16347 }
16348 } // hook up the video segment stream to the first track with h264 data
16349
16350
16351 if (videoTrack && !pipeline.videoSegmentStream) {
16352 pipeline.coalesceStream.numberOfTracks++;
16353 pipeline.videoSegmentStream = new _VideoSegmentStream(videoTrack, options);
16354 pipeline.videoSegmentStream.on('log', self.getLogTrigger_('videoSegmentStream'));
16355 pipeline.videoSegmentStream.on('timelineStartInfo', function (timelineStartInfo) {
16356 // When video emits timelineStartInfo data after a flush, we forward that
16357 // info to the AudioSegmentStream, if it exists, because video timeline
16358 // data takes precedence. Do not do this if keepOriginalTimestamps is set,
16359 // because this is a particularly subtle form of timestamp alteration.
16360 if (audioTrack && !options.keepOriginalTimestamps) {
16361 audioTrack.timelineStartInfo = timelineStartInfo; // On the first segment we trim AAC frames that exist before the
16362 // very earliest DTS we have seen in video because Chrome will
16363 // interpret any video track with a baseMediaDecodeTime that is
16364 // non-zero as a gap.
16365
16366 pipeline.audioSegmentStream.setEarliestDts(timelineStartInfo.dts - self.baseMediaDecodeTime);
16367 }
16368 });
16369 pipeline.videoSegmentStream.on('processedGopsInfo', self.trigger.bind(self, 'gopInfo'));
16370 pipeline.videoSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'videoSegmentTimingInfo'));
16371 pipeline.videoSegmentStream.on('baseMediaDecodeTime', function (baseMediaDecodeTime) {
16372 if (audioTrack) {
16373 pipeline.audioSegmentStream.setVideoBaseMediaDecodeTime(baseMediaDecodeTime);
16374 }
16375 });
16376 pipeline.videoSegmentStream.on('timingInfo', self.trigger.bind(self, 'videoTimingInfo')); // Set up the final part of the video pipeline
16377
16378 pipeline.h264Stream.pipe(pipeline.videoSegmentStream).pipe(pipeline.coalesceStream);
16379 }
16380
16381 if (audioTrack && !pipeline.audioSegmentStream) {
16382 // hook up the audio segment stream to the first track with aac data
16383 pipeline.coalesceStream.numberOfTracks++;
16384 pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
16385 pipeline.audioSegmentStream.on('log', self.getLogTrigger_('audioSegmentStream'));
16386 pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo'));
16387 pipeline.audioSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'audioSegmentTimingInfo')); // Set up the final part of the audio pipeline
16388
16389 pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream);
16390 } // emit pmt info
16391
16392
16393 self.trigger('trackinfo', {
16394 hasAudio: !!audioTrack,
16395 hasVideo: !!videoTrack
16396 });
16397 }
16398 }); // Re-emit any data coming from the coalesce stream to the outside world
16399
16400 pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
16401 pipeline.coalesceStream.on('id3Frame', function (id3Frame) {
16402 id3Frame.dispatchType = pipeline.metadataStream.dispatchType;
16403 self.trigger('id3Frame', id3Frame);
16404 });
16405 pipeline.coalesceStream.on('caption', this.trigger.bind(this, 'caption')); // Let the consumer know we have finished flushing the entire pipeline
16406
16407 pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
16408 addPipelineLogRetriggers(this, pipeline);
16409 }; // hook up the segment streams once track metadata is delivered
16410
16411
16412 this.setBaseMediaDecodeTime = function (baseMediaDecodeTime) {
16413 var pipeline = this.transmuxPipeline_;
16414
16415 if (!options.keepOriginalTimestamps) {
16416 this.baseMediaDecodeTime = baseMediaDecodeTime;
16417 }
16418
16419 if (audioTrack) {
16420 audioTrack.timelineStartInfo.dts = undefined;
16421 audioTrack.timelineStartInfo.pts = undefined;
16422 trackDecodeInfo.clearDtsInfo(audioTrack);
16423
16424 if (pipeline.audioTimestampRolloverStream) {
16425 pipeline.audioTimestampRolloverStream.discontinuity();
16426 }
16427 }
16428
16429 if (videoTrack) {
16430 if (pipeline.videoSegmentStream) {
16431 pipeline.videoSegmentStream.gopCache_ = [];
16432 }
16433
16434 videoTrack.timelineStartInfo.dts = undefined;
16435 videoTrack.timelineStartInfo.pts = undefined;
16436 trackDecodeInfo.clearDtsInfo(videoTrack);
16437 pipeline.captionStream.reset();
16438 }
16439
16440 if (pipeline.timestampRolloverStream) {
16441 pipeline.timestampRolloverStream.discontinuity();
16442 }
16443 };
16444
16445 this.setAudioAppendStart = function (timestamp) {
16446 if (audioTrack) {
16447 this.transmuxPipeline_.audioSegmentStream.setAudioAppendStart(timestamp);
16448 }
16449 };
16450
16451 this.setRemux = function (val) {
16452 var pipeline = this.transmuxPipeline_;
16453 options.remux = val;
16454
16455 if (pipeline && pipeline.coalesceStream) {
16456 pipeline.coalesceStream.setRemux(val);
16457 }
16458 };
16459
16460 this.alignGopsWith = function (gopsToAlignWith) {
16461 if (videoTrack && this.transmuxPipeline_.videoSegmentStream) {
16462 this.transmuxPipeline_.videoSegmentStream.alignGopsWith(gopsToAlignWith);
16463 }
16464 };
16465
16466 this.getLogTrigger_ = function (key) {
16467 var self = this;
16468 return function (event) {
16469 event.stream = key;
16470 self.trigger('log', event);
16471 };
16472 }; // feed incoming data to the front of the parsing pipeline
16473
16474
16475 this.push = function (data) {
16476 if (hasFlushed) {
16477 var isAac = isLikelyAacData(data);
16478
16479 if (isAac && this.transmuxPipeline_.type !== 'aac') {
16480 this.setupAacPipeline();
16481 } else if (!isAac && this.transmuxPipeline_.type !== 'ts') {
16482 this.setupTsPipeline();
16483 }
16484
16485 hasFlushed = false;
16486 }
16487
16488 this.transmuxPipeline_.headOfPipeline.push(data);
16489 }; // flush any buffered data
16490
16491
16492 this.flush = function () {
16493 hasFlushed = true; // Start at the top of the pipeline and flush all pending work
16494
16495 this.transmuxPipeline_.headOfPipeline.flush();
16496 };
16497
16498 this.endTimeline = function () {
16499 this.transmuxPipeline_.headOfPipeline.endTimeline();
16500 };
16501
16502 this.reset = function () {
16503 if (this.transmuxPipeline_.headOfPipeline) {
16504 this.transmuxPipeline_.headOfPipeline.reset();
16505 }
16506 }; // Caption data has to be reset when seeking outside buffered range
16507
16508
16509 this.resetCaptions = function () {
16510 if (this.transmuxPipeline_.captionStream) {
16511 this.transmuxPipeline_.captionStream.reset();
16512 }
16513 };
16514 };
16515
16516 _Transmuxer.prototype = new stream();
16517 var transmuxer = {
16518 Transmuxer: _Transmuxer,
16519 VideoSegmentStream: _VideoSegmentStream,
16520 AudioSegmentStream: _AudioSegmentStream,
16521 AUDIO_PROPERTIES: audioProperties,
16522 VIDEO_PROPERTIES: videoProperties,
16523 // exported for testing
16524 generateSegmentTimingInfo: generateSegmentTimingInfo
16525 };
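// Usage sketch for the exported Transmuxer (illustrative; `bytes` is
// assumed to be a Uint8Array of MPEG2-TS or ADTS/AAC data obtained
// elsewhere -- the first push decides which pipeline gets built):
//
//   var t = new transmuxer.Transmuxer({ remux: true });
//   t.on('data', function (segment) {
//     // segment.initSegment: Uint8Array holding the moov/init data
//     // segment.data: Uint8Array of moof+mdat boxes for a SourceBuffer
//   });
//   t.on('done', function () { /* the whole pipeline has been flushed */ });
//   t.push(bytes);
//   t.flush();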
16526 /**
16527 * mux.js
16528 *
16529 * Copyright (c) Brightcove
16530 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
16531 */
16532
16533 var toUnsigned$3 = function toUnsigned(value) {
16534 return value >>> 0;
16535 };
16536
16537 var toHexString$1 = function toHexString(value) {
16538 return ('00' + value.toString(16)).slice(-2);
16539 };
16540
16541 var bin = {
16542 toUnsigned: toUnsigned$3,
16543 toHexString: toHexString$1
16544 };
16545
16546 var parseType$1 = function parseType(buffer) {
16547 var result = '';
16548 result += String.fromCharCode(buffer[0]);
16549 result += String.fromCharCode(buffer[1]);
16550 result += String.fromCharCode(buffer[2]);
16551 result += String.fromCharCode(buffer[3]);
16552 return result;
16553 };
16554
16555 var parseType_1 = parseType$1;
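// Illustrative: the helper above turns a box's 4-byte type field into its
// fourcc string, e.g. parseType_1(new Uint8Array([0x6d, 0x6f, 0x6f, 0x66]))
// returns 'moof'.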
16556 var toUnsigned$2 = bin.toUnsigned;
16557
16558 var findBox = function findBox(data, path) {
16559 var results = [],
16560 i,
16561 size,
16562 type,
16563 end,
16564 subresults;
16565
16566 if (!path.length) {
16567 // short-circuit the search for empty paths
16568 return null;
16569 }
16570
16571 for (i = 0; i < data.byteLength;) {
16572 size = toUnsigned$2(data[i] << 24 | data[i + 1] << 16 | data[i + 2] << 8 | data[i + 3]);
16573 type = parseType_1(data.subarray(i + 4, i + 8));
16574 end = size > 1 ? i + size : data.byteLength;
16575
16576 if (type === path[0]) {
16577 if (path.length === 1) {
16578 // this is the end of the path and we've found the box we were
16579 // looking for
16580 results.push(data.subarray(i + 8, end));
16581 } else {
16582 // recursively search for the next box along the path
16583 subresults = findBox(data.subarray(i + 8, end), path.slice(1));
16584
16585 if (subresults.length) {
16586 results = results.concat(subresults);
16587 }
16588 }
16589 }
16590
16591 i = end;
16592 } // we've finished searching all of data
16593
16594
16595 return results;
16596 };
16597
16598 var findBox_1 = findBox;
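// Usage sketch (illustrative; `segment` is assumed to be a Uint8Array of
// fMP4 data): findBox_1(segment, ['moof', 'traf']) returns the payload
// bytes (everything after the 8-byte box header) of every traf box nested
// inside a moof box -- exactly how parseCaptionNals below locates the
// track fragments.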
16599 var toUnsigned$1 = bin.toUnsigned;
16600 var getUint64$1 = numbers.getUint64;
16601
16602 var tfdt = function tfdt(data) {
16603 var result = {
16604 version: data[0],
16605 flags: new Uint8Array(data.subarray(1, 4))
16606 };
16607
16608 if (result.version === 1) {
16609 result.baseMediaDecodeTime = getUint64$1(data.subarray(4));
16610 } else {
16611 result.baseMediaDecodeTime = toUnsigned$1(data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7]);
16612 }
16613
16614 return result;
16615 };
16616
16617 var parseTfdt = tfdt;
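// Worked example (illustrative): for a version-0 tfdt payload whose bytes
// 4-7 are [0x00, 0x01, 0x5f, 0x90], parseTfdt reports
// baseMediaDecodeTime = 0x00015f90 = 90000, i.e. one second at a 90kHz
// timescale; a version-1 payload is instead read as a 64-bit value via
// getUint64.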
16618
16619 var parseSampleFlags = function parseSampleFlags(flags) {
16620 return {
16621 isLeading: (flags[0] & 0x0c) >>> 2,
16622 dependsOn: flags[0] & 0x03,
16623 isDependedOn: (flags[1] & 0xc0) >>> 6,
16624 hasRedundancy: (flags[1] & 0x30) >>> 4,
16625 paddingValue: (flags[1] & 0x0e) >>> 1,
16626 isNonSyncSample: flags[1] & 0x01,
16627 degradationPriority: flags[2] << 8 | flags[3]
16628 };
16629 };
16630
16631 var parseSampleFlags_1 = parseSampleFlags;
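// Worked example (illustrative): for flag bytes [0x02, 0x40, 0x00, 0x00]
// the helper above reports dependsOn = 2 (0x02 & 0x03), isDependedOn = 1
// ((0x40 & 0xc0) >>> 6) and every other field 0, following the sample
// flags layout in ISO-BMFF-12/2015.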
16632
16633 var trun = function trun(data) {
16634 var result = {
16635 version: data[0],
16636 flags: new Uint8Array(data.subarray(1, 4)),
16637 samples: []
16638 },
16639 view = new DataView(data.buffer, data.byteOffset, data.byteLength),
16640 // Flag interpretation
16641 dataOffsetPresent = result.flags[2] & 0x01,
16642 // tr_flags 0x000001 (flags[2] is the low byte)
16643 firstSampleFlagsPresent = result.flags[2] & 0x04,
16644 // tr_flags 0x000004
16645 sampleDurationPresent = result.flags[1] & 0x01,
16646 // tr_flags 0x000100 (flags[1] is the middle byte)
16647 sampleSizePresent = result.flags[1] & 0x02,
16648 // tr_flags 0x000200
16649 sampleFlagsPresent = result.flags[1] & 0x04,
16650 // tr_flags 0x000400
16651 sampleCompositionTimeOffsetPresent = result.flags[1] & 0x08,
16652 // tr_flags 0x000800
16653 sampleCount = view.getUint32(4),
16654 offset = 8,
16655 sample;
16656
16657 if (dataOffsetPresent) {
16658 // 32 bit signed integer
16659 result.dataOffset = view.getInt32(offset);
16660 offset += 4;
16661 } // Overrides the flags for the first sample only. The order of
16662 // optional values will be: duration, size, compositionTimeOffset
16663
16664
16665 if (firstSampleFlagsPresent && sampleCount) {
16666 sample = {
16667 flags: parseSampleFlags_1(data.subarray(offset, offset + 4))
16668 };
16669 offset += 4;
16670
16671 if (sampleDurationPresent) {
16672 sample.duration = view.getUint32(offset);
16673 offset += 4;
16674 }
16675
16676 if (sampleSizePresent) {
16677 sample.size = view.getUint32(offset);
16678 offset += 4;
16679 }
16680
16681 if (sampleCompositionTimeOffsetPresent) {
16682 if (result.version === 1) {
16683 sample.compositionTimeOffset = view.getInt32(offset);
16684 } else {
16685 sample.compositionTimeOffset = view.getUint32(offset);
16686 }
16687
16688 offset += 4;
16689 }
16690
16691 result.samples.push(sample);
16692 sampleCount--;
16693 }
16694
16695 while (sampleCount--) {
16696 sample = {};
16697
16698 if (sampleDurationPresent) {
16699 sample.duration = view.getUint32(offset);
16700 offset += 4;
16701 }
16702
16703 if (sampleSizePresent) {
16704 sample.size = view.getUint32(offset);
16705 offset += 4;
16706 }
16707
16708 if (sampleFlagsPresent) {
16709 sample.flags = parseSampleFlags_1(data.subarray(offset, offset + 4));
16710 offset += 4;
16711 }
16712
16713 if (sampleCompositionTimeOffsetPresent) {
16714 if (result.version === 1) {
16715 sample.compositionTimeOffset = view.getInt32(offset);
16716 } else {
16717 sample.compositionTimeOffset = view.getUint32(offset);
16718 }
16719
16720 offset += 4;
16721 }
16722
16723 result.samples.push(sample);
16724 }
16725
16726 return result;
16727 };
16728
16729 var parseTrun = trun;
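// Worked example (illustrative): a trun with tr_flags 0x000301 (flag
// bytes [0x00, 0x03, 0x01]) carries a dataOffset plus an explicit
// duration and size for every sample, so parseTrun reads 4 bytes of
// dataOffset and then 8 bytes per sample record.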
16730
16731 var tfhd = function tfhd(data) {
16732 var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
16733 result = {
16734 version: data[0],
16735 flags: new Uint8Array(data.subarray(1, 4)),
16736 trackId: view.getUint32(4)
16737 },
16738 baseDataOffsetPresent = result.flags[2] & 0x01,
16739 sampleDescriptionIndexPresent = result.flags[2] & 0x02,
16740 defaultSampleDurationPresent = result.flags[2] & 0x08,
16741 defaultSampleSizePresent = result.flags[2] & 0x10,
16742 defaultSampleFlagsPresent = result.flags[2] & 0x20,
16743 durationIsEmpty = result.flags[0] & 0x01, // tf_flags 0x010000; flags[0] holds the high byte, so mask 0x01
16744 defaultBaseIsMoof = result.flags[0] & 0x02, // tf_flags 0x020000; masking the byte with 0x020000 was always 0
16745 i;
16746 i = 8;
16747
16748 if (baseDataOffsetPresent) {
16749 i += 4; // truncate top 4 bytes
16750 // FIXME: should we read the full 64 bits?
16751
16752 result.baseDataOffset = view.getUint32(12);
16753 i += 4;
16754 }
16755
16756 if (sampleDescriptionIndexPresent) {
16757 result.sampleDescriptionIndex = view.getUint32(i);
16758 i += 4;
16759 }
16760
16761 if (defaultSampleDurationPresent) {
16762 result.defaultSampleDuration = view.getUint32(i);
16763 i += 4;
16764 }
16765
16766 if (defaultSampleSizePresent) {
16767 result.defaultSampleSize = view.getUint32(i);
16768 i += 4;
16769 }
16770
16771 if (defaultSampleFlagsPresent) {
16772 result.defaultSampleFlags = view.getUint32(i);
16773 }
16774
16775 if (durationIsEmpty) {
16776 result.durationIsEmpty = true;
16777 }
16778
16779 if (!baseDataOffsetPresent && defaultBaseIsMoof) {
16780 result.baseDataOffsetIsMoof = true;
16781 }
16782
16783 return result;
16784 };
16785
16786 var parseTfhd = tfhd;
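// Worked example (illustrative, using the high-byte masks fixed above): a
// tfhd with flag bytes [0x02, 0x00, 0x08] (tf_flags 0x020008) sets
// default-sample-duration-present and default-base-is-moof, so parseTfhd
// reports the trackId, a defaultSampleDuration and, because no explicit
// baseDataOffset is present, baseDataOffsetIsMoof: true.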
16787 var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
16788 var win;
16789
16790 if (typeof window !== "undefined") {
16791 win = window;
16792 } else if (typeof commonjsGlobal !== "undefined") {
16793 win = commonjsGlobal;
16794 } else if (typeof self !== "undefined") {
16795 win = self;
16796 } else {
16797 win = {};
16798 }
16799
16800 var window_1 = win;
16801 var discardEmulationPreventionBytes = captionPacketParser.discardEmulationPreventionBytes;
16802 var CaptionStream = captionStream.CaptionStream;
16803 /**
16804 * Maps an offset in the mdat to a sample based on the size of the samples.
16805 * Assumes that `parseSamples` has been called first.
16806 *
16807 * @param {Number} offset - The offset into the mdat
16808 * @param {Object[]} samples - An array of samples, parsed using `parseSamples`
16809 * @return {?Object} The matching sample, or null if no match was found.
16810 *
16811 * @see ISO-BMFF-12/2015, Section 8.8.8
16812 **/
16813
16814 var mapToSample = function mapToSample(offset, samples) {
16815 var approximateOffset = offset;
16816
16817 for (var i = 0; i < samples.length; i++) {
16818 var sample = samples[i];
16819
16820 if (approximateOffset < sample.size) {
16821 return sample;
16822 }
16823
16824 approximateOffset -= sample.size;
16825 }
16826
16827 return null;
16828 };
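// Worked example (illustrative): with samples of size [100, 200] and an
// offset of 150, the helper above skips the first sample (150 >= 100) and
// returns the second one, since the remaining offset of 50 falls inside
// its 200 bytes.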
16829 /**
16830 * Finds SEI nal units contained in a Media Data Box.
16831 * Assumes that `parseSamples` has been called first.
16832 *
16833 * @param {Uint8Array} avcStream - The bytes of the mdat
16834 * @param {Object[]} samples - The samples parsed out by `parseSamples`
16835 * @param {Number} trackId - The trackId of this video track
16836 * @return {Object[]} seiNals - the parsed SEI NALUs found.
16837 * The contents of the seiNal should match what is expected by
16838 * CaptionStream.push (nalUnitType, size, data, escapedRBSP, pts, dts)
16839 *
16840 * @see ISO-BMFF-12/2015, Section 8.1.1
16841 * @see Rec. ITU-T H.264, 7.3.2.3.1
16842 **/
16843
16844
16845 var findSeiNals = function findSeiNals(avcStream, samples, trackId) {
16846 var avcView = new DataView(avcStream.buffer, avcStream.byteOffset, avcStream.byteLength),
16847 result = {
16848 logs: [],
16849 seiNals: []
16850 },
16851 seiNal,
16852 i,
16853 length,
16854 lastMatchedSample;
16855
16856 for (i = 0; i + 4 < avcStream.length; i += length) {
16857 length = avcView.getUint32(i);
16858 i += 4; // Skip ahead if this doesn't appear to be an H264 stream
16859
16860 if (length <= 0) {
16861 continue;
16862 }
16863
16864 switch (avcStream[i] & 0x1F) {
16865 case 0x06:
16866 var data = avcStream.subarray(i + 1, i + 1 + length);
16867 var matchingSample = mapToSample(i, samples);
16868 seiNal = {
16869 nalUnitType: 'sei_rbsp',
16870 size: length,
16871 data: data,
16872 escapedRBSP: discardEmulationPreventionBytes(data),
16873 trackId: trackId
16874 };
16875
16876 if (matchingSample) {
16877 seiNal.pts = matchingSample.pts;
16878 seiNal.dts = matchingSample.dts;
16879 lastMatchedSample = matchingSample;
16880 } else if (lastMatchedSample) {
16881 // If a matching sample cannot be found, use the last
16882 // sample's values as they should be as close as possible
16883 seiNal.pts = lastMatchedSample.pts;
16884 seiNal.dts = lastMatchedSample.dts;
16885 } else {
16886 result.logs.push({
16887 level: 'warn',
16888 message: 'We\'ve encountered a nal unit without data at ' + i + ' for trackId ' + trackId + '. See mux.js#223.'
16889 });
16890 break;
16891 }
16892
16893 result.seiNals.push(seiNal);
16894 break;
16895 }
16896 }
16897
16898 return result;
16899 };
16900 /**
16901 * Parses sample information out of Track Run Boxes and calculates
16902 * the absolute presentation and decode timestamps of each sample.
16903 *
16904 * @param {Array<Uint8Array>} truns - The Track Run (trun) boxes to be parsed
16905 * @param {Number|BigInt} baseMediaDecodeTime - base media decode time from tfdt
16906 * @see ISO-BMFF-12/2015, Section 8.8.12
16907 * @param {Object} tfhd - The parsed Track Fragment Header
16908 * @see inspect.parseTfhd
16909 * @return {Object[]} the parsed samples
16910 *
16911 * @see ISO-BMFF-12/2015, Section 8.8.8
16912 **/
16913
16914
16915 var parseSamples = function parseSamples(truns, baseMediaDecodeTime, tfhd) {
16916 var currentDts = baseMediaDecodeTime;
16917 var defaultSampleDuration = tfhd.defaultSampleDuration || 0;
16918 var defaultSampleSize = tfhd.defaultSampleSize || 0;
16919 var trackId = tfhd.trackId;
16920 var allSamples = [];
16921 truns.forEach(function (trun) {
16922 // Note: We currently do not parse the sample table as well
16923 // as the trun. It's possible some sources will require this.
16924 // moov > trak > mdia > minf > stbl
16925 var trackRun = parseTrun(trun);
16926 var samples = trackRun.samples;
16927 samples.forEach(function (sample) {
16928 if (sample.duration === undefined) {
16929 sample.duration = defaultSampleDuration;
16930 }
16931
16932 if (sample.size === undefined) {
16933 sample.size = defaultSampleSize;
16934 }
16935
16936 sample.trackId = trackId;
16937 sample.dts = currentDts;
16938
16939 if (sample.compositionTimeOffset === undefined) {
16940 sample.compositionTimeOffset = 0;
16941 }
16942
16943 if (typeof currentDts === 'bigint') {
16944 sample.pts = currentDts + window_1.BigInt(sample.compositionTimeOffset);
16945 currentDts += window_1.BigInt(sample.duration);
16946 } else {
16947 sample.pts = currentDts + sample.compositionTimeOffset;
16948 currentDts += sample.duration;
16949 }
16950 });
16951 allSamples = allSamples.concat(samples);
16952 });
16953 return allSamples;
16954 };
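// Worked example (illustrative): with baseMediaDecodeTime = 1000, a tfhd
// carrying defaultSampleDuration = 3000 and a single trun listing two
// samples with compositionTimeOffsets of 0 and 1500, parseSamples yields
// sample 0 at dts 1000 / pts 1000 and sample 1 at dts 4000 / pts 5500.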
16955 /**
16956 * Parses out caption nals from an FMP4 segment's video tracks.
16957 *
16958 * @param {Uint8Array} segment - The bytes of a single segment
16959 * @param {Number} videoTrackId - The trackId of a video track in the segment
16960 * @return {Object.<Number, Object[]>} A mapping of video trackId to
16961 * a list of seiNals found in that track
16962 **/
16963
16964
16965 var parseCaptionNals = function parseCaptionNals(segment, videoTrackId) {
16966 // To get the samples
16967 var trafs = findBox_1(segment, ['moof', 'traf']); // To get SEI NAL units
16968
16969 var mdats = findBox_1(segment, ['mdat']);
16970 var captionNals = {};
16971 var mdatTrafPairs = []; // Pair each traf with an mdat, since moofs and mdats come in pairs
16972
16973 mdats.forEach(function (mdat, index) {
16974 var matchingTraf = trafs[index];
16975 mdatTrafPairs.push({
16976 mdat: mdat,
16977 traf: matchingTraf
16978 });
16979 });
16980 mdatTrafPairs.forEach(function (pair) {
16981 var mdat = pair.mdat;
16982 var traf = pair.traf;
16983 var tfhd = findBox_1(traf, ['tfhd']); // Exactly 1 tfhd per traf
16984
16985 var headerInfo = parseTfhd(tfhd[0]);
16986 var trackId = headerInfo.trackId;
16987 var tfdt = findBox_1(traf, ['tfdt']); // Either 0 or 1 tfdt per traf
16988
16989 var baseMediaDecodeTime = tfdt.length > 0 ? parseTfdt(tfdt[0]).baseMediaDecodeTime : 0;
16990 var truns = findBox_1(traf, ['trun']);
16991 var samples;
16992 var result; // Only parse video data for the chosen video track
16993
16994 if (videoTrackId === trackId && truns.length > 0) {
16995 samples = parseSamples(truns, baseMediaDecodeTime, headerInfo);
16996 result = findSeiNals(mdat, samples, trackId);
16997
16998 if (!captionNals[trackId]) {
16999 captionNals[trackId] = {
17000 seiNals: [],
17001 logs: []
17002 };
17003 }
17004
17005 captionNals[trackId].seiNals = captionNals[trackId].seiNals.concat(result.seiNals);
17006 captionNals[trackId].logs = captionNals[trackId].logs.concat(result.logs);
17007 }
17008 });
17009 return captionNals;
17010 };
17011 /**
17012 * Parses out inband captions from an MP4 container and returns
17013 * caption objects that can be used by WebVTT and the TextTrack API.
17014 * @see https://developer.mozilla.org/en-US/docs/Web/API/VTTCue
17015 * @see https://developer.mozilla.org/en-US/docs/Web/API/TextTrack
17016 * Assumes that `probe.getVideoTrackIds` and `probe.timescale` have been called first
17017 *
17018 * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
17019 * @param {Number} trackId - The id of the video track to parse
17020 * @param {Number} timescale - The timescale for the video track from the init segment
17021 *
17022 * @return {?Object[]} parsedCaptions - A list of captions or null if no video tracks
17023 * @return {Number} parsedCaptions[].startTime - The time to show the caption in seconds
17024 * @return {Number} parsedCaptions[].endTime - The time to stop showing the caption in seconds
17025 * @return {String} parsedCaptions[].text - The visible content of the caption
17026 **/
17027
17028
17029 var parseEmbeddedCaptions = function parseEmbeddedCaptions(segment, trackId, timescale) {
17030 var captionNals; // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
17031
17032 if (trackId === null) {
17033 return null;
17034 }
17035
17036 captionNals = parseCaptionNals(segment, trackId);
17037 var trackNals = captionNals[trackId] || {};
17038 return {
17039 seiNals: trackNals.seiNals,
17040 logs: trackNals.logs,
17041 timescale: timescale
17042 };
17043 };
17044 /**
17045 * Converts SEI NALUs into captions that can be used by video.js
17046 **/
17047
17048
17049 var CaptionParser = function CaptionParser() {
17050 var isInitialized = false;
17051 var captionStream; // Stores segments seen before trackId and timescale are set
17052
17053 var segmentCache; // Stores video track ID of the track being parsed
17054
17055 var trackId; // Stores the timescale of the track being parsed
17056
17057 var timescale; // Stores captions parsed so far
17058
17059 var parsedCaptions; // Stores whether we are receiving partial data or not
17060
17061 var parsingPartial;
17062 /**
17063 * A method to indicate whether a CaptionParser has been initialized
17064 * @returns {Boolean}
17065 **/
17066
17067 this.isInitialized = function () {
17068 return isInitialized;
17069 };
17070 /**
17071 * Initializes the underlying CaptionStream, SEI NAL parsing
17072 * and management, and caption collection
17073 **/
17074
17075
17076 this.init = function (options) {
17077 captionStream = new CaptionStream();
17078 isInitialized = true;
17079 parsingPartial = options ? options.isPartial : false; // Collect dispatched captions
17080
17081 captionStream.on('data', function (event) {
17082 // Convert to seconds in the source's timescale
17083 event.startTime = event.startPts / timescale;
17084 event.endTime = event.endPts / timescale;
17085 parsedCaptions.captions.push(event);
17086 parsedCaptions.captionStreams[event.stream] = true;
17087 });
17088 captionStream.on('log', function (log) {
17089 parsedCaptions.logs.push(log);
17090 });
17091 };
17092 /**
17093 * Determines if a new video track will be selected
17094 * or if the timescale changed
17095 * @return {Boolean}
17096 **/
17097
17098
17099 this.isNewInit = function (videoTrackIds, timescales) {
17100 if (videoTrackIds && videoTrackIds.length === 0 || timescales && typeof timescales === 'object' && Object.keys(timescales).length === 0) {
17101 return false;
17102 }
17103
17104 return trackId !== videoTrackIds[0] || timescale !== timescales[trackId];
17105 };
17106 /**
17107 * Parses out SEI captions and interacts with underlying
17108 * CaptionStream to return dispatched captions
17109 *
17110 * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
17111 * @param {Number[]} videoTrackIds - A list of video tracks found in the init segment
17112 * @param {Object.<Number, Number>} timescales - The timescales found in the init segment
17113 * @see parseEmbeddedCaptions
17114 * @see m2ts/caption-stream.js
17115 **/
17116
17117
17118 this.parse = function (segment, videoTrackIds, timescales) {
17119 var parsedData;
17120
17121 if (!this.isInitialized()) {
17122 return null; // This is not likely to be a video segment
17123 } else if (!videoTrackIds || !timescales) {
17124 return null;
17125 } else if (this.isNewInit(videoTrackIds, timescales)) {
17126 // Use the first video track only as there is no
17127 // mechanism to switch to other video tracks
17128 trackId = videoTrackIds[0];
17129 timescale = timescales[trackId]; // If an init segment has not been seen yet, hold onto segment
17130 // data until we have one.
17131 // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
17132 } else if (trackId === null || !timescale) {
17133 segmentCache.push(segment);
17134 return null;
17135 } // Now that a timescale and trackId is set, parse cached segments
17136
17137
17138 while (segmentCache.length > 0) {
17139 var cachedSegment = segmentCache.shift();
17140 this.parse(cachedSegment, videoTrackIds, timescales);
17141 }
17142
17143 parsedData = parseEmbeddedCaptions(segment, trackId, timescale);
17144
17145 if (parsedData && parsedData.logs) {
17146 parsedCaptions.logs = parsedCaptions.logs.concat(parsedData.logs);
17147 }
17148
17149 if (parsedData === null || !parsedData.seiNals) {
17150 if (parsedCaptions.logs.length) {
17151 return {
17152 logs: parsedCaptions.logs,
17153 captions: [],
17154 captionStreams: {}
17155 };
17156 }
17157
17158 return null;
17159 }
17160
17161 this.pushNals(parsedData.seiNals); // Force the parsed captions to be dispatched
17162
17163 this.flushStream();
17164 return parsedCaptions;
17165 };
17166 /**
17167 * Pushes SEI NALUs onto CaptionStream
17168 * @param {Object[]} nals - A list of SEI nals parsed using `parseCaptionNals`
17169 * Assumes that `parseCaptionNals` has been called first
17170 * @see m2ts/caption-stream.js
17171 **/
17172
17173
17174 this.pushNals = function (nals) {
17175 if (!this.isInitialized() || !nals || nals.length === 0) {
17176 return null;
17177 }
17178
17179 nals.forEach(function (nal) {
17180 captionStream.push(nal);
17181 });
17182 };
17183 /**
17184 * Flushes underlying CaptionStream to dispatch processed, displayable captions
17185 * @see m2ts/caption-stream.js
17186 **/
17187
17188
17189 this.flushStream = function () {
17190 if (!this.isInitialized()) {
17191 return null;
17192 }
17193
17194 if (!parsingPartial) {
17195 captionStream.flush();
17196 } else {
17197 captionStream.partialFlush();
17198 }
17199 };
17200 /**
17201 * Reset caption buckets for new data
17202 **/
17203
17204
17205 this.clearParsedCaptions = function () {
17206 parsedCaptions.captions = [];
17207 parsedCaptions.captionStreams = {};
17208 parsedCaptions.logs = [];
17209 };
17210 /**
17211 * Resets underlying CaptionStream
17212 * @see m2ts/caption-stream.js
17213 **/
17214
17215
17216 this.resetCaptionStream = function () {
17217 if (!this.isInitialized()) {
17218 return null;
17219 }
17220
17221 captionStream.reset();
17222 };
17223 /**
17224 * Convenience method to clear all captions flushed from the
17225 * CaptionStream and still being parsed
17226 * @see m2ts/caption-stream.js
17227 **/
17228
17229
17230 this.clearAllCaptions = function () {
17231 this.clearParsedCaptions();
17232 this.resetCaptionStream();
17233 };
17234 /**
17235 * Reset caption parser
17236 **/
17237
17238
17239 this.reset = function () {
17240 segmentCache = [];
17241 trackId = null;
17242 timescale = null;
17243
17244 if (!parsedCaptions) {
17245 parsedCaptions = {
17246 captions: [],
17247 // CC1, CC2, CC3, CC4
17248 captionStreams: {},
17249 logs: []
17250 };
17251 } else {
17252 this.clearParsedCaptions();
17253 }
17254
17255 this.resetCaptionStream();
17256 };
17257
17258 this.reset();
17259 };
17260
17261 var captionParser = CaptionParser;
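// Illustrative usage sketch for the caption parser above (kept as a comment so
// the bundle's behavior is unchanged; `segmentBytes`, `videoTrackIds` and
// `timescales` are hypothetical inputs, typically probed from the init segment):
//
//   var parser = new captionParser();
//   parser.init();
//   var result = parser.parse(segmentBytes, videoTrackIds, timescales);
//   if (result) {
//     result.captions.forEach(function (caption) {
//       // caption.startTime / caption.endTime are already in seconds
//     });
//   }
//   parser.clearAllCaptions(); // e.g. on a seek or discontinuity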
17262 var toUnsigned = bin.toUnsigned;
17263 var toHexString = bin.toHexString;
17264 var getUint64 = numbers.getUint64;
17265 var timescale, startTime, compositionStartTime, getVideoTrackIds, getTracks, getTimescaleFromMediaHeader;
17266 /**
17267 * Parses an MP4 initialization segment and extracts the timescale
17268 * values for any declared tracks. Timescale values indicate the
17269 * number of clock ticks per second to assume for time-based values
17270 * elsewhere in the MP4.
17271 *
17272 * To determine the start time of an MP4, you need two pieces of
17273 * information: the timescale unit and the earliest base media decode
17274 * time. Multiple timescales can be specified within an MP4 but the
17275 * base media decode time is always expressed in the timescale from
17276 * the media header box for the track:
17277 * ```
17278 * moov > trak > mdia > mdhd.timescale
17279 * ```
17280 * @param init {Uint8Array} the bytes of the init segment
17281 * @return {object} a hash of track ids to timescale values or null if
17282 * the init segment is malformed.
17283 */
17284
17285 timescale = function timescale(init) {
17286 var result = {},
17287 traks = findBox_1(init, ['moov', 'trak']); // mdhd timescale
17288
17289 return traks.reduce(function (result, trak) {
17290 var tkhd, version, index, id, mdhd;
17291 tkhd = findBox_1(trak, ['tkhd'])[0];
17292
17293 if (!tkhd) {
17294 return null;
17295 }
17296
17297 version = tkhd[0];
17298 index = version === 0 ? 12 : 20;
17299 id = toUnsigned(tkhd[index] << 24 | tkhd[index + 1] << 16 | tkhd[index + 2] << 8 | tkhd[index + 3]);
17300 mdhd = findBox_1(trak, ['mdia', 'mdhd'])[0];
17301
17302 if (!mdhd) {
17303 return null;
17304 }
17305
17306 version = mdhd[0];
17307 index = version === 0 ? 12 : 20;
17308 result[id] = toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
17309 return result;
17310 }, result);
17311 };
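// Sketch of the expected output shape (assuming a two-track init segment):
// `timescale(initBytes)` would return something like { 1: 90000, 2: 48000 },
// i.e. a map of track id to clock ticks per second, or null for malformed input.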
17312 /**
17313 * Determine the base media decode start time, in seconds, for an MP4
17314 * fragment. If multiple fragments are specified, the earliest time is
17315 * returned.
17316 *
17317 * The base media decode time can be parsed from track fragment
17318 * metadata:
17319 * ```
17320 * moof > traf > tfdt.baseMediaDecodeTime
17321 * ```
17322 * It requires the timescale value from the mdhd to interpret.
17323 *
17324 * @param timescale {object} a hash of track ids to timescale values.
17325 * @return {number} the earliest base media decode start time for the
17326 * fragment, in seconds
17327 */
17328
17329
17330 startTime = function startTime(timescale, fragment) {
17331 var trafs; // we need info from two children of each track fragment box
17332
17333 trafs = findBox_1(fragment, ['moof', 'traf']); // determine the start times for each track
17334
17335 var lowestTime = trafs.reduce(function (acc, traf) {
17336 var tfhd = findBox_1(traf, ['tfhd'])[0]; // get the track id from the tfhd
17337
17338 var id = toUnsigned(tfhd[4] << 24 | tfhd[5] << 16 | tfhd[6] << 8 | tfhd[7]); // assume a 90kHz clock if no timescale was specified
17339
17340 var scale = timescale[id] || 90e3; // get the base media decode time from the tfdt
17341
17342 var tfdt = findBox_1(traf, ['tfdt'])[0];
17343 var dv = new DataView(tfdt.buffer, tfdt.byteOffset, tfdt.byteLength);
17344 var baseTime; // version 1 is 64 bit
17345
17346 if (tfdt[0] === 1) {
17347 baseTime = getUint64(tfdt.subarray(4, 12));
17348 } else {
17349 baseTime = dv.getUint32(4);
17350 } // convert base time to seconds if it is a valid number.
17351
17352
17353 var seconds;
17354
17355 if (typeof baseTime === 'bigint') {
17356 seconds = baseTime / window_1.BigInt(scale);
17357 } else if (typeof baseTime === 'number' && !isNaN(baseTime)) {
17358 seconds = baseTime / scale;
17359 }
17360
17361 if (seconds < Number.MAX_SAFE_INTEGER) {
17362 seconds = Number(seconds);
17363 }
17364
17365 if (seconds < acc) {
17366 acc = seconds;
17367 }
17368
17369 return acc;
17370 }, Infinity);
17371 return typeof lowestTime === 'bigint' || isFinite(lowestTime) ? lowestTime : 0;
17372 };
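// Illustrative pairing of the two probes above (`initBytes` and `segmentBytes`
// are hypothetical buffers): a tfdt baseMediaDecodeTime of 900000 ticks on a
// 90000 ticks/second track yields a 10 second start time.
//
//   var scales = timescale(initBytes); // e.g. { 1: 90000 }
//   var start = startTime(scales, segmentBytes); // e.g. 900000 / 90000 === 10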
17373 /**
17374 * Determine the composition start, in seconds, for an MP4
17375 * fragment.
17376 *
17377 * The composition start time of a fragment can be calculated using the base
17378 * media decode time, composition time offset, and timescale, as follows:
17379 *
17380 * compositionStartTime = (baseMediaDecodeTime + compositionTimeOffset) / timescale
17381 *
17382 * All of the aforementioned information is contained within a media fragment's
17383 * `traf` box, except for timescale info, which comes from the initialization
17384 * segment, so a track id (also contained within a `traf`) is also necessary to
17385 * associate it with a timescale
17386 *
17387 *
17388 * @param timescales {object} - a hash of track ids to timescale values.
17389 * @param fragment {Unit8Array} - the bytes of a media segment
17390 * @return {number} the composition start time for the fragment, in seconds
17391 **/
17392
17393
17394 compositionStartTime = function compositionStartTime(timescales, fragment) {
17395 var trafBoxes = findBox_1(fragment, ['moof', 'traf']);
17396 var baseMediaDecodeTime = 0;
17397 var compositionTimeOffset = 0;
17398 var trackId;
17399
17400 if (trafBoxes && trafBoxes.length) {
17401 // The spec states that track run samples contained within a `traf` box are contiguous, but
17402 // it does not explicitly state whether the `traf` boxes themselves are contiguous.
17403 // We will assume that they are, so we only need the first to calculate start time.
17404 var tfhd = findBox_1(trafBoxes[0], ['tfhd'])[0];
17405 var trun = findBox_1(trafBoxes[0], ['trun'])[0];
17406 var tfdt = findBox_1(trafBoxes[0], ['tfdt'])[0];
17407
17408 if (tfhd) {
17409 var parsedTfhd = parseTfhd(tfhd);
17410 trackId = parsedTfhd.trackId;
17411 }
17412
17413 if (tfdt) {
17414 var parsedTfdt = parseTfdt(tfdt);
17415 baseMediaDecodeTime = parsedTfdt.baseMediaDecodeTime;
17416 }
17417
17418 if (trun) {
17419 var parsedTrun = parseTrun(trun);
17420
17421 if (parsedTrun.samples && parsedTrun.samples.length) {
17422 compositionTimeOffset = parsedTrun.samples[0].compositionTimeOffset || 0;
17423 }
17424 }
17425 } // Get timescale for this specific track. Assume a 90kHz clock if no timescale was
17426 // specified.
17427
17428
17429 var timescale = timescales[trackId] || 90e3; // return the composition start time, in seconds
17430
17431 if (typeof baseMediaDecodeTime === 'bigint') {
17432 compositionTimeOffset = window_1.BigInt(compositionTimeOffset);
17433 timescale = window_1.BigInt(timescale);
17434 }
17435
17436 var result = (baseMediaDecodeTime + compositionTimeOffset) / timescale;
17437
17438 if (typeof result === 'bigint' && result < Number.MAX_SAFE_INTEGER) {
17439 result = Number(result);
17440 }
17441
17442 return result;
17443 };
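// Worked example of the formula in the doc comment above: with a
// baseMediaDecodeTime of 900000, a first-sample compositionTimeOffset of 3000,
// and a 90000 ticks/second timescale, the composition start time is
// (900000 + 3000) / 90000 = 10.0333... seconds.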
17444 /**
17445 * Find the trackIds of the video tracks in this source.
17446 * Found by parsing the Handler Reference and Track Header Boxes:
17447 * moov > trak > mdia > hdlr
17448 * moov > trak > tkhd
17449 *
17450 * @param {Uint8Array} init - The bytes of the init segment for this source
17451 * @return {Number[]} A list of trackIds
17452 *
17453 * @see ISO-BMFF-12/2015, Section 8.4.3
17454 **/
17455
17456
17457 getVideoTrackIds = function getVideoTrackIds(init) {
17458 var traks = findBox_1(init, ['moov', 'trak']);
17459 var videoTrackIds = [];
17460 traks.forEach(function (trak) {
17461 var hdlrs = findBox_1(trak, ['mdia', 'hdlr']);
17462 var tkhds = findBox_1(trak, ['tkhd']);
17463 hdlrs.forEach(function (hdlr, index) {
17464 var handlerType = parseType_1(hdlr.subarray(8, 12));
17465 var tkhd = tkhds[index];
17466 var view;
17467 var version;
17468 var trackId;
17469
17470 if (handlerType === 'vide') {
17471 view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
17472 version = view.getUint8(0);
17473 trackId = version === 0 ? view.getUint32(12) : view.getUint32(20);
17474 videoTrackIds.push(trackId);
17475 }
17476 });
17477 });
17478 return videoTrackIds;
17479 };
17480
17481 getTimescaleFromMediaHeader = function getTimescaleFromMediaHeader(mdhd) {
17482 // mdhd is a FullBox, meaning it will have its own version as the first byte
17483 var version = mdhd[0];
17484 var index = version === 0 ? 12 : 20;
17485 return toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
17486 };
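// Why 12 vs. 20 above: mdhd is a FullBox, so after the 4 header bytes
// (version + flags) come creation_time and modification_time, which are
// 4 bytes each in version 0 (placing timescale at offset 12) and 8 bytes
// each in version 1 (placing timescale at offset 20).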
17487 /**
17488 * Get all the video, audio, and hint tracks from a non fragmented
17489 * mp4 segment
17490 */
17491
17492
17493 getTracks = function getTracks(init) {
17494 var traks = findBox_1(init, ['moov', 'trak']);
17495 var tracks = [];
17496 traks.forEach(function (trak) {
17497 var track = {};
17498 var tkhd = findBox_1(trak, ['tkhd'])[0];
17499 var view, tkhdVersion; // id
17500
17501 if (tkhd) {
17502 view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
17503 tkhdVersion = view.getUint8(0);
17504 track.id = tkhdVersion === 0 ? view.getUint32(12) : view.getUint32(20);
17505 }
17506
17507 var hdlr = findBox_1(trak, ['mdia', 'hdlr'])[0]; // type
17508
17509 if (hdlr) {
17510 var type = parseType_1(hdlr.subarray(8, 12));
17511
17512 if (type === 'vide') {
17513 track.type = 'video';
17514 } else if (type === 'soun') {
17515 track.type = 'audio';
17516 } else {
17517 track.type = type;
17518 }
17519 } // codec
17520
17521
17522 var stsd = findBox_1(trak, ['mdia', 'minf', 'stbl', 'stsd'])[0];
17523
17524 if (stsd) {
17525 var sampleDescriptions = stsd.subarray(8); // gives the codec type string
17526
17527 track.codec = parseType_1(sampleDescriptions.subarray(4, 8));
17528 var codecBox = findBox_1(sampleDescriptions, [track.codec])[0];
17529 var codecConfig, codecConfigType;
17530
17531 if (codecBox) {
17532 // https://tools.ietf.org/html/rfc6381#section-3.3
17533 if (/^[asm]vc[1-9]$/i.test(track.codec)) {
17534 // we don't need anything but the "config" parameter of the
17535 // avc1 codecBox
17536 codecConfig = codecBox.subarray(78);
17537 codecConfigType = parseType_1(codecConfig.subarray(4, 8));
17538
17539 if (codecConfigType === 'avcC' && codecConfig.length > 11) {
17540 track.codec += '.'; // left padded with zeroes for single digit hex
17541 // profile idc
17542
17543 track.codec += toHexString(codecConfig[9]); // the byte containing the constraint_set flags
17544
17545 track.codec += toHexString(codecConfig[10]); // level idc
17546
17547 track.codec += toHexString(codecConfig[11]);
17548 } else {
17549 // TODO: show a warning that we couldn't parse the codec
17550 // and are using the default
17551 track.codec = 'avc1.4d400d';
17552 }
17553 } else if (/^mp4[av]$/i.test(track.codec)) {
17554 // we do not need anything but the streamDescriptor of the mp4a codecBox
17555 codecConfig = codecBox.subarray(28);
17556 codecConfigType = parseType_1(codecConfig.subarray(4, 8));
17557
17558 if (codecConfigType === 'esds' && codecConfig.length > 20 && codecConfig[19] !== 0) {
17559 track.codec += '.' + toHexString(codecConfig[19]); // this value is only a single digit
17560
17561 track.codec += '.' + toHexString(codecConfig[20] >>> 2 & 0x3f).replace(/^0/, '');
17562 } else {
17563 // TODO: show a warning that we couldn't parse the codec
17564 // and are using the default
17565 track.codec = 'mp4a.40.2';
17566 }
17567 } else {
17568 // flac, opus, etc
17569 track.codec = track.codec.toLowerCase();
17570 }
17571 }
17572 }
17573
17574 var mdhd = findBox_1(trak, ['mdia', 'mdhd'])[0];
17575
17576 if (mdhd) {
17577 track.timescale = getTimescaleFromMediaHeader(mdhd);
17578 }
17579
17580 tracks.push(track);
17581 });
17582 return tracks;
17583 };
17584
17585 var probe$2 = {
17586 // export mp4 inspector's findBox and parseType for backwards compatibility
17587 findBox: findBox_1,
17588 parseType: parseType_1,
17589 timescale: timescale,
17590 startTime: startTime,
17591 compositionStartTime: compositionStartTime,
17592 videoTrackIds: getVideoTrackIds,
17593 tracks: getTracks,
17594 getTimescaleFromMediaHeader: getTimescaleFromMediaHeader
17595 };
17596
17597 var parsePid = function parsePid(packet) {
17598 var pid = packet[1] & 0x1f;
17599 pid <<= 8;
17600 pid |= packet[2];
17601 return pid;
17602 };
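// Worked bit-math example for parsePid: for TS header bytes 0x47 0x41 0x00,
// (0x41 & 0x1f) === 0x01, shifted left 8 gives 0x100, OR 0x00 yields PID 0x100.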
17603
17604 var parsePayloadUnitStartIndicator = function parsePayloadUnitStartIndicator(packet) {
17605 return !!(packet[1] & 0x40);
17606 };
17607
17608 var parseAdaptionField = function parseAdaptionField(packet) {
17609 var offset = 0; // if an adaptation field is present, its length is specified by the
17610 // fifth byte of the TS packet header. The adaptation field is
17611 // used to add stuffing to PES packets that don't fill a complete
17612 // TS packet, and to specify some forms of timing and control data
17613 // that we do not currently use.
17614
17615 if ((packet[3] & 0x30) >>> 4 > 0x01) {
17616 offset += packet[4] + 1;
17617 }
17618
17619 return offset;
17620 };
17621
17622 var parseType = function parseType(packet, pmtPid) {
17623 var pid = parsePid(packet);
17624
17625 if (pid === 0) {
17626 return 'pat';
17627 } else if (pid === pmtPid) {
17628 return 'pmt';
17629 } else if (pmtPid) {
17630 return 'pes';
17631 }
17632
17633 return null;
17634 };
17635
17636 var parsePat = function parsePat(packet) {
17637 var pusi = parsePayloadUnitStartIndicator(packet);
17638 var offset = 4 + parseAdaptionField(packet);
17639
17640 if (pusi) {
17641 offset += packet[offset] + 1;
17642 }
17643
17644 return (packet[offset + 10] & 0x1f) << 8 | packet[offset + 11];
17645 };
17646
17647 var parsePmt = function parsePmt(packet) {
17648 var programMapTable = {};
17649 var pusi = parsePayloadUnitStartIndicator(packet);
17650 var payloadOffset = 4 + parseAdaptionField(packet);
17651
17652 if (pusi) {
17653 payloadOffset += packet[payloadOffset] + 1;
17654 } // PMTs can be sent ahead of the time when they should actually
17655 // take effect. We don't believe this should ever be the case
17656 // for HLS but we'll ignore "forward" PMT declarations if we see
17657 // them. Future PMT declarations have the current_next_indicator
17658 // set to zero.
17659
17660
17661 if (!(packet[payloadOffset + 5] & 0x01)) {
17662 return;
17663 }
17664
17665 var sectionLength, tableEnd, programInfoLength; // the mapping table ends at the end of the current section
17666
17667 sectionLength = (packet[payloadOffset + 1] & 0x0f) << 8 | packet[payloadOffset + 2];
17668 tableEnd = 3 + sectionLength - 4; // to determine where the table is, we have to figure out how
17669 // long the program info descriptors are
17670
17671 programInfoLength = (packet[payloadOffset + 10] & 0x0f) << 8 | packet[payloadOffset + 11]; // advance the offset to the first entry in the mapping table
17672
17673 var offset = 12 + programInfoLength;
17674
17675 while (offset < tableEnd) {
17676 var i = payloadOffset + offset; // add an entry that maps the elementary_pid to the stream_type
17677
17678 programMapTable[(packet[i + 1] & 0x1F) << 8 | packet[i + 2]] = packet[i]; // move to the next table entry
17679 // skip past the elementary stream descriptors, if present
17680
17681 offset += ((packet[i + 3] & 0x0F) << 8 | packet[i + 4]) + 5;
17682 }
17683
17684 return programMapTable;
17685 };
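// Sketch of a typical parsePmt result: an object keyed by elementary PID with
// stream-type values, e.g. { 256: 0x1b, 257: 0x0f } for an H.264 video stream
// and an ADTS/AAC audio stream (0x1b and 0x0f are the MPEG-TS stream types).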
17686
17687 var parsePesType = function parsePesType(packet, programMapTable) {
17688 var pid = parsePid(packet);
17689 var type = programMapTable[pid];
17690
17691 switch (type) {
17692 case streamTypes.H264_STREAM_TYPE:
17693 return 'video';
17694
17695 case streamTypes.ADTS_STREAM_TYPE:
17696 return 'audio';
17697
17698 case streamTypes.METADATA_STREAM_TYPE:
17699 return 'timed-metadata';
17700
17701 default:
17702 return null;
17703 }
17704 };
17705
17706 var parsePesTime = function parsePesTime(packet) {
17707 var pusi = parsePayloadUnitStartIndicator(packet);
17708
17709 if (!pusi) {
17710 return null;
17711 }
17712
17713 var offset = 4 + parseAdaptionField(packet);
17714
17715 if (offset >= packet.byteLength) {
17716 // From the H.222.0 MPEG-TS spec
17717 // "For transport stream packets carrying PES packets, stuffing is needed when there
17718 // is insufficient PES packet data to completely fill the transport stream packet
17719 // payload bytes. Stuffing is accomplished by defining an adaptation field longer than
17720 // the sum of the lengths of the data elements in it, so that the payload bytes
17721 // remaining after the adaptation field exactly accommodates the available PES packet
17722 // data."
17723 //
17724 // If the offset is >= the length of the packet, then the packet contains no data
17725 // and instead is just adaptation field stuffing bytes
17726 return null;
17727 }
17728
17729 var pes = null;
17730 var ptsDtsFlags; // PES packets may be annotated with a PTS value, or a PTS value
17731 // and a DTS value. Determine what combination of values is
17732 // available to work with.
17733
17734 ptsDtsFlags = packet[offset + 7]; // PTS and DTS are normally stored as a 33-bit number. Javascript
17735 // performs all bitwise operations on 32-bit integers but javascript
17736 // supports a much greater range (52-bits) of integer using standard
17737 // mathematical operations.
17738 // We construct a 31-bit value using bitwise operators over the 31
17739 // most significant bits and then multiply by 4 (equal to a left-shift
17740 // of 2) before we add the final 2 least significant bits of the
17741 // timestamp (equal to an OR.)
17742
17743 if (ptsDtsFlags & 0xC0) {
17744 pes = {}; // the PTS and DTS are not written out directly. For information
17745 // on how they are encoded, see
17746 // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
17747
17748 pes.pts = (packet[offset + 9] & 0x0E) << 27 | (packet[offset + 10] & 0xFF) << 20 | (packet[offset + 11] & 0xFE) << 12 | (packet[offset + 12] & 0xFF) << 5 | (packet[offset + 13] & 0xFE) >>> 3;
17749 pes.pts *= 4; // Left shift by 2
17750
17751 pes.pts += (packet[offset + 13] & 0x06) >>> 1; // OR by the two LSBs
17752
17753 pes.dts = pes.pts;
17754
17755 if (ptsDtsFlags & 0x40) {
17756 pes.dts = (packet[offset + 14] & 0x0E) << 27 | (packet[offset + 15] & 0xFF) << 20 | (packet[offset + 16] & 0xFE) << 12 | (packet[offset + 17] & 0xFF) << 5 | (packet[offset + 18] & 0xFE) >>> 3;
17757 pes.dts *= 4; // Left shift by 2
17758
17759 pes.dts += (packet[offset + 18] & 0x06) >>> 1; // OR by the two LSBs
17760 }
17761 }
17762
17763 return pes;
17764 };
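// Worked example of the 33-bit timestamp reconstruction above: the code builds
// the top 31 bits with shifts/ORs, multiplies by 4 (a safe stand-in for << 2,
// which would overflow JavaScript's 32-bit bitwise ops), then adds the 2 least
// significant bits. E.g. a 31-bit value of 1 with trailing bits 0b11 becomes
// 1 * 4 + 3 = 7.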
17765
17766 var parseNalUnitType = function parseNalUnitType(type) {
17767 switch (type) {
17768 case 0x05:
17769 return 'slice_layer_without_partitioning_rbsp_idr';
17770
17771 case 0x06:
17772 return 'sei_rbsp';
17773
17774 case 0x07:
17775 return 'seq_parameter_set_rbsp';
17776
17777 case 0x08:
17778 return 'pic_parameter_set_rbsp';
17779
17780 case 0x09:
17781 return 'access_unit_delimiter_rbsp';
17782
17783 default:
17784 return null;
17785 }
17786 };
17787
17788 var videoPacketContainsKeyFrame = function videoPacketContainsKeyFrame(packet) {
17789 var offset = 4 + parseAdaptionField(packet);
17790 var frameBuffer = packet.subarray(offset);
17791 var frameI = 0;
17792 var frameSyncPoint = 0;
17793 var foundKeyFrame = false;
17794 var nalType; // advance the sync point to a NAL start, if necessary
17795
17796 for (; frameSyncPoint < frameBuffer.byteLength - 3; frameSyncPoint++) {
17797 if (frameBuffer[frameSyncPoint + 2] === 1) {
17798 // the sync point is properly aligned
17799 frameI = frameSyncPoint + 5;
17800 break;
17801 }
17802 }
17803
17804 while (frameI < frameBuffer.byteLength) {
17805 // look at the current byte to determine if we've hit the end of
17806 // a NAL unit boundary
17807 switch (frameBuffer[frameI]) {
17808 case 0:
17809 // skip past non-sync sequences
17810 if (frameBuffer[frameI - 1] !== 0) {
17811 frameI += 2;
17812 break;
17813 } else if (frameBuffer[frameI - 2] !== 0) {
17814 frameI++;
17815 break;
17816 }
17817
17818 if (frameSyncPoint + 3 !== frameI - 2) {
17819 nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
17820
17821 if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
17822 foundKeyFrame = true;
17823 }
17824 } // drop trailing zeroes
17825
17826
17827 do {
17828 frameI++;
17829 } while (frameBuffer[frameI] !== 1 && frameI < frameBuffer.length);
17830
17831 frameSyncPoint = frameI - 2;
17832 frameI += 3;
17833 break;
17834
17835 case 1:
17836 // skip past non-sync sequences
17837 if (frameBuffer[frameI - 1] !== 0 || frameBuffer[frameI - 2] !== 0) {
17838 frameI += 3;
17839 break;
17840 }
17841
17842 nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
17843
17844 if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
17845 foundKeyFrame = true;
17846 }
17847
17848 frameSyncPoint = frameI - 2;
17849 frameI += 3;
17850 break;
17851
17852 default:
17853 // the current byte isn't a one or zero, so it cannot be part
17854 // of a sync sequence
17855 frameI += 3;
17856 break;
17857 }
17858 }
17859
17860 frameBuffer = frameBuffer.subarray(frameSyncPoint);
17861 frameI -= frameSyncPoint;
17862 frameSyncPoint = 0; // parse the final nal
17863
17864 if (frameBuffer && frameBuffer.byteLength > 3) {
17865 nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
17866
17867 if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
17868 foundKeyFrame = true;
17869 }
17870 }
17871
17872 return foundKeyFrame;
17873 };
17874
17875 var probe$1 = {
17876 parseType: parseType,
17877 parsePat: parsePat,
17878 parsePmt: parsePmt,
17879 parsePayloadUnitStartIndicator: parsePayloadUnitStartIndicator,
17880 parsePesType: parsePesType,
17881 parsePesTime: parsePesTime,
17882 videoPacketContainsKeyFrame: videoPacketContainsKeyFrame
17883 };
17884 var handleRollover = timestampRolloverStream.handleRollover;
17885 var probe = {};
17886 probe.ts = probe$1;
17887 probe.aac = utils;
17888 var ONE_SECOND_IN_TS = clock.ONE_SECOND_IN_TS;
17889 var MP2T_PACKET_LENGTH = 188,
17890 // bytes
17891 SYNC_BYTE = 0x47;
17892 /**
17893 * walks through segment data looking for pat and pmt packets to parse out
17894 * program map table information
17895 */
17896
17897 var parsePsi_ = function parsePsi_(bytes, pmt) {
17898 var startIndex = 0,
17899 endIndex = MP2T_PACKET_LENGTH,
17900 packet,
17901 type;
17902
17903 while (endIndex < bytes.byteLength) {
17904 // Look for a pair of start and end sync bytes in the data.
17905 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
17906 // We found a packet
17907 packet = bytes.subarray(startIndex, endIndex);
17908 type = probe.ts.parseType(packet, pmt.pid);
17909
17910 switch (type) {
17911 case 'pat':
17912 pmt.pid = probe.ts.parsePat(packet);
17913 break;
17914
17915 case 'pmt':
17916 var table = probe.ts.parsePmt(packet);
17917 pmt.table = pmt.table || {};
17918 Object.keys(table).forEach(function (key) {
17919 pmt.table[key] = table[key];
17920 });
17921 break;
17922 }
17923
17924 startIndex += MP2T_PACKET_LENGTH;
17925 endIndex += MP2T_PACKET_LENGTH;
17926 continue;
17927 } // If we get here, we have somehow become de-synchronized and we need to step
17928 // forward one byte at a time until we find a pair of sync bytes that denote
17929 // a packet
17930
17931
17932 startIndex++;
17933 endIndex++;
17934 }
17935 };
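// Usage sketch for parsePsi_ (`tsBytes` is a hypothetical Uint8Array of
// 188-byte TS packets); the pmt object is filled in by reference:
//
//   var pmt = { pid: null, table: null };
//   parsePsi_(tsBytes, pmt);
//   // pmt.table now maps elementary PIDs to stream types (see parsePmt above)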
17936 /**
17937 * walks through the segment data from the start and end to get timing information
17938 * for the first and last audio pes packets
17939 */
17940
17941
17942 var parseAudioPes_ = function parseAudioPes_(bytes, pmt, result) {
17943 var startIndex = 0,
17944 endIndex = MP2T_PACKET_LENGTH,
17945 packet,
17946 type,
17947 pesType,
17948 pusi,
17949 parsed;
17950 var endLoop = false; // Start walking from start of segment to get first audio packet
17951
17952 while (endIndex <= bytes.byteLength) {
17953 // Look for a pair of start and end sync bytes in the data.
17954 if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
17955 // We found a packet
17956 packet = bytes.subarray(startIndex, endIndex);
17957 type = probe.ts.parseType(packet, pmt.pid);
17958
17959 switch (type) {
17960 case 'pes':
17961 pesType = probe.ts.parsePesType(packet, pmt.table);
17962 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
17963
17964 if (pesType === 'audio' && pusi) {
17965 parsed = probe.ts.parsePesTime(packet);
17966
17967 if (parsed) {
17968 parsed.type = 'audio';
17969 result.audio.push(parsed);
17970 endLoop = true;
17971 }
17972 }
17973
17974 break;
17975 }
17976
17977 if (endLoop) {
17978 break;
17979 }
17980
17981 startIndex += MP2T_PACKET_LENGTH;
17982 endIndex += MP2T_PACKET_LENGTH;
17983 continue;
17984 } // If we get here, we have somehow become de-synchronized and we need to step
17985 // forward one byte at a time until we find a pair of sync bytes that denote
17986 // a packet
17987
17988
17989 startIndex++;
17990 endIndex++;
17991 } // Start walking from end of segment to get last audio packet
17992
17993
17994 endIndex = bytes.byteLength;
17995 startIndex = endIndex - MP2T_PACKET_LENGTH;
17996 endLoop = false;
17997
17998 while (startIndex >= 0) {
17999 // Look for a pair of start and end sync bytes in the data.
18000 if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
18001 // We found a packet
18002 packet = bytes.subarray(startIndex, endIndex);
18003 type = probe.ts.parseType(packet, pmt.pid);
18004
18005 switch (type) {
18006 case 'pes':
18007 pesType = probe.ts.parsePesType(packet, pmt.table);
18008 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
18009
18010 if (pesType === 'audio' && pusi) {
18011 parsed = probe.ts.parsePesTime(packet);
18012
18013 if (parsed) {
18014 parsed.type = 'audio';
18015 result.audio.push(parsed);
18016 endLoop = true;
18017 }
18018 }
18019
18020 break;
18021 }
18022
18023 if (endLoop) {
18024 break;
18025 }
18026
18027 startIndex -= MP2T_PACKET_LENGTH;
18028 endIndex -= MP2T_PACKET_LENGTH;
18029 continue;
18030 } // If we get here, we have somehow become de-synchronized and we need to step
18031 // backward one byte at a time until we find a pair of sync bytes that denote
18032 // a packet
18033
18034
18035 startIndex--;
18036 endIndex--;
18037 }
18038 };
18039 /**
18040 * walks through the segment data from the start and end to get timing information
18041 * for the first and last video pes packets as well as timing information for the first
18042 * key frame.
18043 */
18044
18045
18046 var parseVideoPes_ = function parseVideoPes_(bytes, pmt, result) {
18047 var startIndex = 0,
18048 endIndex = MP2T_PACKET_LENGTH,
18049 packet,
18050 type,
18051 pesType,
18052 pusi,
18053 parsed,
18054 frame,
18055 i,
18056 pes;
18057 var endLoop = false;
18058 var currentFrame = {
18059 data: [],
18060 size: 0
18061 }; // Start walking from start of segment to get first video packet
18062
18063 while (endIndex < bytes.byteLength) {
18064 // Look for a pair of start and end sync bytes in the data.
18065 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
18066 // We found a packet
18067 packet = bytes.subarray(startIndex, endIndex);
18068 type = probe.ts.parseType(packet, pmt.pid);
18069
18070 switch (type) {
18071 case 'pes':
18072 pesType = probe.ts.parsePesType(packet, pmt.table);
18073 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
18074
18075 if (pesType === 'video') {
18076 if (pusi && !endLoop) {
18077 parsed = probe.ts.parsePesTime(packet);
18078
18079 if (parsed) {
18080 parsed.type = 'video';
18081 result.video.push(parsed);
18082 endLoop = true;
18083 }
18084 }
18085
18086 if (!result.firstKeyFrame) {
18087 if (pusi) {
18088 if (currentFrame.size !== 0) {
18089 frame = new Uint8Array(currentFrame.size);
18090 i = 0;
18091
18092 while (currentFrame.data.length) {
18093 pes = currentFrame.data.shift();
18094 frame.set(pes, i);
18095 i += pes.byteLength;
18096 }
18097
18098 if (probe.ts.videoPacketContainsKeyFrame(frame)) {
18099 var firstKeyFrame = probe.ts.parsePesTime(frame); // PTS/DTS may not be available. Simply *not* setting
18100 // the keyframe seems to work fine with HLS playback
18101 // and definitely preferable to a crash with TypeError...
18102
18103 if (firstKeyFrame) {
18104 result.firstKeyFrame = firstKeyFrame;
18105 result.firstKeyFrame.type = 'video';
18106 } else {
18107 // eslint-disable-next-line
18108 console.warn('Failed to extract PTS/DTS from PES at first keyframe. ' + 'This could be an unusual TS segment, or else mux.js did not ' + 'parse your TS segment correctly. If you know your TS ' + 'segments do contain PTS/DTS on keyframes please file a bug ' + 'report! You can try ffprobe to double check for yourself.');
18109 }
18110 }
18111
18112 currentFrame.size = 0;
18113 }
18114 }
18115
18116 currentFrame.data.push(packet);
18117 currentFrame.size += packet.byteLength;
18118 }
18119 }
18120
18121 break;
18122 }
18123
18124 if (endLoop && result.firstKeyFrame) {
18125 break;
18126 }
18127
18128 startIndex += MP2T_PACKET_LENGTH;
18129 endIndex += MP2T_PACKET_LENGTH;
18130 continue;
18131 } // If we get here, we have somehow become de-synchronized and we need to step
18132 // forward one byte at a time until we find a pair of sync bytes that denote
18133 // a packet
18134
18135
18136 startIndex++;
18137 endIndex++;
18138 } // Start walking from end of segment to get last video packet
18139
18140
18141 endIndex = bytes.byteLength;
18142 startIndex = endIndex - MP2T_PACKET_LENGTH;
18143 endLoop = false;
18144
18145 while (startIndex >= 0) {
18146 // Look for a pair of start and end sync bytes in the data.
18147 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
18148 // We found a packet
18149 packet = bytes.subarray(startIndex, endIndex);
18150 type = probe.ts.parseType(packet, pmt.pid);
18151
18152 switch (type) {
18153 case 'pes':
18154 pesType = probe.ts.parsePesType(packet, pmt.table);
18155 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
18156
18157 if (pesType === 'video' && pusi) {
18158 parsed = probe.ts.parsePesTime(packet);
18159
18160 if (parsed) {
18161 parsed.type = 'video';
18162 result.video.push(parsed);
18163 endLoop = true;
18164 }
18165 }
18166
18167 break;
18168 }
18169
18170 if (endLoop) {
18171 break;
18172 }
18173
18174 startIndex -= MP2T_PACKET_LENGTH;
18175 endIndex -= MP2T_PACKET_LENGTH;
18176 continue;
18177 } // If we get here, we have somehow become de-synchronized and we need to step
18178 // backward one byte at a time until we find a pair of sync bytes that denote
18179 // a packet
18180
18181
18182 startIndex--;
18183 endIndex--;
18184 }
18185 };
18186 /**
18187 * Adjusts the timestamp information for the segment to account for
18188 * rollover and convert to seconds based on pes packet timescale (90khz clock)
18189 */
18190
18191
18192 var adjustTimestamp_ = function adjustTimestamp_(segmentInfo, baseTimestamp) {
18193 if (segmentInfo.audio && segmentInfo.audio.length) {
18194 var audioBaseTimestamp = baseTimestamp;
18195
18196 if (typeof audioBaseTimestamp === 'undefined' || isNaN(audioBaseTimestamp)) {
18197 audioBaseTimestamp = segmentInfo.audio[0].dts;
18198 }
18199
18200 segmentInfo.audio.forEach(function (info) {
18201 info.dts = handleRollover(info.dts, audioBaseTimestamp);
18202 info.pts = handleRollover(info.pts, audioBaseTimestamp); // time in seconds
18203
18204 info.dtsTime = info.dts / ONE_SECOND_IN_TS;
18205 info.ptsTime = info.pts / ONE_SECOND_IN_TS;
18206 });
18207 }
18208
18209 if (segmentInfo.video && segmentInfo.video.length) {
18210 var videoBaseTimestamp = baseTimestamp;
18211
18212 if (typeof videoBaseTimestamp === 'undefined' || isNaN(videoBaseTimestamp)) {
18213 videoBaseTimestamp = segmentInfo.video[0].dts;
18214 }
18215
18216 segmentInfo.video.forEach(function (info) {
18217 info.dts = handleRollover(info.dts, videoBaseTimestamp);
18218 info.pts = handleRollover(info.pts, videoBaseTimestamp); // time in seconds
18219
18220 info.dtsTime = info.dts / ONE_SECOND_IN_TS;
18221 info.ptsTime = info.pts / ONE_SECOND_IN_TS;
18222 });
18223
18224 if (segmentInfo.firstKeyFrame) {
18225 var frame = segmentInfo.firstKeyFrame;
18226 frame.dts = handleRollover(frame.dts, videoBaseTimestamp);
18227 frame.pts = handleRollover(frame.pts, videoBaseTimestamp); // time in seconds
18228
18229 frame.dtsTime = frame.dts / ONE_SECOND_IN_TS;
18230 frame.ptsTime = frame.pts / ONE_SECOND_IN_TS;
18231 }
18232 }
18233 };
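// Context for the rollover handling above: PTS/DTS are 33-bit values on a
// 90kHz clock, so they wrap roughly every 2^33 / 90000 = ~95443 seconds
// (about 26.5 hours); handleRollover unwraps values relative to the base
// timestamp before they are converted to seconds via ONE_SECOND_IN_TS.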
18234 /**
18235 * inspects the aac data stream for start and end time information
18236 */
18237
18238
18239 var inspectAac_ = function inspectAac_(bytes) {
18240 var endLoop = false,
18241 audioCount = 0,
18242 sampleRate = null,
18243 timestamp = null,
18244 frameSize = 0,
18245 byteIndex = 0,
18246 packet;
18247
18248 while (bytes.length - byteIndex >= 3) {
18249 var type = probe.aac.parseType(bytes, byteIndex);
18250
18251 switch (type) {
18252 case 'timed-metadata':
18253 // Exit early because we don't have enough to parse
18254 // the ID3 tag header
18255 if (bytes.length - byteIndex < 10) {
18256 endLoop = true;
18257 break;
18258 }
18259
18260 frameSize = probe.aac.parseId3TagSize(bytes, byteIndex); // Exit early if we don't have enough in the buffer
18261 // to emit a full packet
18262
18263 if (frameSize > bytes.length) {
18264 endLoop = true;
18265 break;
18266 }
18267
18268 if (timestamp === null) {
18269 packet = bytes.subarray(byteIndex, byteIndex + frameSize);
18270 timestamp = probe.aac.parseAacTimestamp(packet);
18271 }
18272
18273 byteIndex += frameSize;
18274 break;
18275
18276 case 'audio':
18277 // Exit early because we don't have enough to parse
18278 // the ADTS frame header
18279 if (bytes.length - byteIndex < 7) {
18280 endLoop = true;
18281 break;
18282 }
18283
18284 frameSize = probe.aac.parseAdtsSize(bytes, byteIndex); // Exit early if we don't have enough in the buffer
18285 // to emit a full packet
18286
18287 if (frameSize > bytes.length) {
18288 endLoop = true;
18289 break;
18290 }
18291
18292 if (sampleRate === null) {
18293 packet = bytes.subarray(byteIndex, byteIndex + frameSize);
18294 sampleRate = probe.aac.parseSampleRate(packet);
18295 }
18296
18297 audioCount++;
18298 byteIndex += frameSize;
18299 break;
18300
18301 default:
18302 byteIndex++;
18303 break;
18304 }
18305
18306 if (endLoop) {
18307 return null;
18308 }
18309 }
18310
18311 if (sampleRate === null || timestamp === null) {
18312 return null;
18313 }
18314
18315 var audioTimescale = ONE_SECOND_IN_TS / sampleRate;
18316 var result = {
18317 audio: [{
18318 type: 'audio',
18319 dts: timestamp,
18320 pts: timestamp
18321 }, {
18322 type: 'audio',
18323 dts: timestamp + audioCount * 1024 * audioTimescale,
18324 pts: timestamp + audioCount * 1024 * audioTimescale
18325 }]
18326 };
18327 return result;
18328 };
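// Arithmetic behind the synthesized end time above: each ADTS/AAC frame holds
// 1024 samples, and audioTimescale (ONE_SECOND_IN_TS / sampleRate) converts
// samples to 90kHz ticks. E.g. at 48000 Hz, one frame spans
// 1024 * (90000 / 48000) = 1920 ticks.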
18329 /**
18330 * inspects the transport stream segment data for start and end time information
18331 * of the audio and video tracks (when present) as well as the first key frame's
18332 * start time.
18333 */
18334
18335
18336 var inspectTs_ = function inspectTs_(bytes) {
18337 var pmt = {
18338 pid: null,
18339 table: null
18340 };
18341 var result = {};
18342 parsePsi_(bytes, pmt);
18343
18344 for (var pid in pmt.table) {
18345 if (pmt.table.hasOwnProperty(pid)) {
18346 var type = pmt.table[pid];
18347
18348 switch (type) {
18349 case streamTypes.H264_STREAM_TYPE:
18350 result.video = [];
18351 parseVideoPes_(bytes, pmt, result);
18352
18353 if (result.video.length === 0) {
18354 delete result.video;
18355 }
18356
18357 break;
18358
18359 case streamTypes.ADTS_STREAM_TYPE:
18360 result.audio = [];
18361 parseAudioPes_(bytes, pmt, result);
18362
18363 if (result.audio.length === 0) {
18364 delete result.audio;
18365 }
18366
18367 break;
18368 }
18369 }
18370 }
18371
18372 return result;
18373 };
18374 /**
18375 * Inspects segment byte data and returns an object with start and end timing information
18376 *
18377 * @param {Uint8Array} bytes The segment byte data
18378 * @param {Number} baseTimestamp Relative reference timestamp used when adjusting frame
18379 * timestamps for rollover. This value must be expressed in the 90kHz clock.
18380 * @return {Object} Object containing start and end frame timing info of segment.
18381 */
18382
18383
18384 var inspect = function inspect(bytes, baseTimestamp) {
18385 var isAacData = probe.aac.isLikelyAacData(bytes);
18386 var result;
18387
18388 if (isAacData) {
18389 result = inspectAac_(bytes);
18390 } else {
18391 result = inspectTs_(bytes);
18392 }
18393
18394 if (!result || !result.audio && !result.video) {
18395 return null;
18396 }
18397
18398 adjustTimestamp_(result, baseTimestamp);
18399 return result;
18400 };
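// Usage sketch for the inspector (`segmentBytes` is hypothetical; baseTimestamp
// is an optional 90kHz reference used for rollover handling):
//
//   var timing = inspect(segmentBytes, baseTimestamp);
//   if (timing && timing.video) {
//     // timing.video[0].ptsTime is the first video PTS in seconds
//   }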
18401
18402 var tsInspector = {
18403 inspect: inspect,
18404 parseAudioPes_: parseAudioPes_
18405 };
18406 /* global self */
18407
18408 /**
18409 * Re-emits transmuxer events by converting them into messages to the
18410 * world outside the worker.
18411 *
18412 * @param {Object} transmuxer the transmuxer to wire events on
18413 * @private
18414 */
18415
18416 var wireTransmuxerEvents = function wireTransmuxerEvents(self, transmuxer) {
18417 transmuxer.on('data', function (segment) {
18418 // transfer ownership of the underlying ArrayBuffer
18419 // instead of doing a copy to save memory
18420 // ArrayBuffers are transferable but generic TypedArrays are not
18421 // @link https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Using_web_workers#Passing_data_by_transferring_ownership_(transferable_objects)
18422 var initArray = segment.initSegment;
18423 segment.initSegment = {
18424 data: initArray.buffer,
18425 byteOffset: initArray.byteOffset,
18426 byteLength: initArray.byteLength
18427 };
18428 var typedArray = segment.data;
18429 segment.data = typedArray.buffer;
18430 self.postMessage({
18431 action: 'data',
18432 segment: segment,
18433 byteOffset: typedArray.byteOffset,
18434 byteLength: typedArray.byteLength
18435 }, [segment.data]);
18436 });
18437 transmuxer.on('done', function (data) {
18438 self.postMessage({
18439 action: 'done'
18440 });
18441 });
18442 transmuxer.on('gopInfo', function (gopInfo) {
18443 self.postMessage({
18444 action: 'gopInfo',
18445 gopInfo: gopInfo
18446 });
18447 });
18448 transmuxer.on('videoSegmentTimingInfo', function (timingInfo) {
18449 var videoSegmentTimingInfo = {
18450 start: {
18451 decode: clock.videoTsToSeconds(timingInfo.start.dts),
18452 presentation: clock.videoTsToSeconds(timingInfo.start.pts)
18453 },
18454 end: {
18455 decode: clock.videoTsToSeconds(timingInfo.end.dts),
18456 presentation: clock.videoTsToSeconds(timingInfo.end.pts)
18457 },
18458 baseMediaDecodeTime: clock.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
18459 };
18460
18461 if (timingInfo.prependedContentDuration) {
18462 videoSegmentTimingInfo.prependedContentDuration = clock.videoTsToSeconds(timingInfo.prependedContentDuration);
18463 }
18464
18465 self.postMessage({
18466 action: 'videoSegmentTimingInfo',
18467 videoSegmentTimingInfo: videoSegmentTimingInfo
18468 });
18469 });
18470 transmuxer.on('audioSegmentTimingInfo', function (timingInfo) {
18471 // Note that all times for [audio/video]SegmentTimingInfo events are in video clock
18472 var audioSegmentTimingInfo = {
18473 start: {
18474 decode: clock.videoTsToSeconds(timingInfo.start.dts),
18475 presentation: clock.videoTsToSeconds(timingInfo.start.pts)
18476 },
18477 end: {
18478 decode: clock.videoTsToSeconds(timingInfo.end.dts),
18479 presentation: clock.videoTsToSeconds(timingInfo.end.pts)
18480 },
18481 baseMediaDecodeTime: clock.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
18482 };
18483
18484 if (timingInfo.prependedContentDuration) {
18485 audioSegmentTimingInfo.prependedContentDuration = clock.videoTsToSeconds(timingInfo.prependedContentDuration);
18486 }
18487
18488 self.postMessage({
18489 action: 'audioSegmentTimingInfo',
18490 audioSegmentTimingInfo: audioSegmentTimingInfo
18491 });
18492 });
18493 transmuxer.on('id3Frame', function (id3Frame) {
18494 self.postMessage({
18495 action: 'id3Frame',
18496 id3Frame: id3Frame
18497 });
18498 });
18499 transmuxer.on('caption', function (caption) {
18500 self.postMessage({
18501 action: 'caption',
18502 caption: caption
18503 });
18504 });
18505 transmuxer.on('trackinfo', function (trackInfo) {
18506 self.postMessage({
18507 action: 'trackinfo',
18508 trackInfo: trackInfo
18509 });
18510 });
18511 transmuxer.on('audioTimingInfo', function (audioTimingInfo) {
18512 // convert to video TS since we prioritize video time over audio
18513 self.postMessage({
18514 action: 'audioTimingInfo',
18515 audioTimingInfo: {
18516 start: clock.videoTsToSeconds(audioTimingInfo.start),
18517 end: clock.videoTsToSeconds(audioTimingInfo.end)
18518 }
18519 });
18520 });
18521 transmuxer.on('videoTimingInfo', function (videoTimingInfo) {
18522 self.postMessage({
18523 action: 'videoTimingInfo',
18524 videoTimingInfo: {
18525 start: clock.videoTsToSeconds(videoTimingInfo.start),
18526 end: clock.videoTsToSeconds(videoTimingInfo.end)
18527 }
18528 });
18529 });
18530 transmuxer.on('log', function (log) {
18531 self.postMessage({
18532 action: 'log',
18533 log: log
18534 });
18535 });
18536 };
18537 /**
18538 * All incoming messages route through this hash. If no function exists
18539 * to handle an incoming message, then we ignore the message.
18540 *
18541 * @class MessageHandlers
18542 * @param {Object} options the options to initialize with
18543 */
18544
18545
18546 var MessageHandlers = /*#__PURE__*/function () {
18547 function MessageHandlers(self, options) {
18548 this.options = options || {};
18549 this.self = self;
18550 this.init();
18551 }
18552 /**
18553 * initialize our web worker and wire all the events.
18554 */
18555
18556
18557 var _proto = MessageHandlers.prototype;
18558
18559 _proto.init = function init() {
18560 if (this.transmuxer) {
18561 this.transmuxer.dispose();
18562 }
18563
18564 this.transmuxer = new transmuxer.Transmuxer(this.options);
18565 wireTransmuxerEvents(this.self, this.transmuxer);
18566 };
18567
18568 _proto.pushMp4Captions = function pushMp4Captions(data) {
18569 if (!this.captionParser) {
18570 this.captionParser = new captionParser();
18571 this.captionParser.init();
18572 }
18573
18574 var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
18575 var parsed = this.captionParser.parse(segment, data.trackIds, data.timescales);
18576 this.self.postMessage({
18577 action: 'mp4Captions',
18578 captions: parsed && parsed.captions || [],
18579 logs: parsed && parsed.logs || [],
18580 data: segment.buffer
18581 }, [segment.buffer]);
18582 };
18583
18584 _proto.probeMp4StartTime = function probeMp4StartTime(_ref) {
18585 var timescales = _ref.timescales,
18586 data = _ref.data;
18587 var startTime = probe$2.startTime(timescales, data);
18588 this.self.postMessage({
18589 action: 'probeMp4StartTime',
18590 startTime: startTime,
18591 data: data
18592 }, [data.buffer]);
18593 };
18594
18595 _proto.probeMp4Tracks = function probeMp4Tracks(_ref2) {
18596 var data = _ref2.data;
18597 var tracks = probe$2.tracks(data);
18598 this.self.postMessage({
18599 action: 'probeMp4Tracks',
18600 tracks: tracks,
18601 data: data
18602 }, [data.buffer]);
18603 }
18604 /**
18605 * Probe an mpeg2-ts segment to determine the start time of the segment in its
18606 * internal "media time," as well as whether it contains video and/or audio.
18607 *
18608 * @private
18609 * @param {Uint8Array} bytes - segment bytes
18610 * @param {number} baseStartTime
18611 * Relative reference timestamp used when adjusting frame timestamps for rollover.
18612 * This value should be in seconds, as it's converted to a 90kHz clock within the
18613 * function body.
18614 * @return {Object} The start time of the current segment in "media time" as well as
18615 * whether it contains video and/or audio
18616 */
18617 ;
18618
18619 _proto.probeTs = function probeTs(_ref3) {
18620 var data = _ref3.data,
18621 baseStartTime = _ref3.baseStartTime;
18622 var tsStartTime = typeof baseStartTime === 'number' && !isNaN(baseStartTime) ? baseStartTime * clock.ONE_SECOND_IN_TS : void 0;
18623 var timeInfo = tsInspector.inspect(data, tsStartTime);
18624 var result = null;
18625
18626 if (timeInfo) {
18627 result = {
18628 // each type's time info comes back as an array of 2 times, start and end
18629 hasVideo: timeInfo.video && timeInfo.video.length === 2 || false,
18630 hasAudio: timeInfo.audio && timeInfo.audio.length === 2 || false
18631 };
18632
18633 if (result.hasVideo) {
18634 result.videoStart = timeInfo.video[0].ptsTime;
18635 }
18636
18637 if (result.hasAudio) {
18638 result.audioStart = timeInfo.audio[0].ptsTime;
18639 }
18640 }
18641
18642 this.self.postMessage({
18643 action: 'probeTs',
18644 result: result,
18645 data: data
18646 }, [data.buffer]);
18647 };
18648
18649 _proto.clearAllMp4Captions = function clearAllMp4Captions() {
18650 if (this.captionParser) {
18651 this.captionParser.clearAllCaptions();
18652 }
18653 };
18654
18655 _proto.clearParsedMp4Captions = function clearParsedMp4Captions() {
18656 if (this.captionParser) {
18657 this.captionParser.clearParsedCaptions();
18658 }
18659 }
18660 /**
18661 * Adds data (a ts segment) to the start of the transmuxer pipeline for
18662 * processing.
18663 *
18664 * @param {ArrayBuffer} data data to push into the muxer
18665 */
18666 ;
18667
18668 _proto.push = function push(data) {
18669 // Cast array buffer to correct type for transmuxer
18670 var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
18671 this.transmuxer.push(segment);
18672 }
18673 /**
18674 * Recreate the transmuxer so that the next segment added via `push`
18675 * starts with a fresh transmuxer.
18676 */
18677 ;
18678
18679 _proto.reset = function reset() {
18680 this.transmuxer.reset();
18681 }
18682 /**
18683 * Set the value that will be used as the `baseMediaDecodeTime` time for the
18684 * next segment pushed in. Subsequent segments will have their `baseMediaDecodeTime`
18685 * set relative to the first based on the PTS values.
18686 *
18687 * @param {Object} data used to set the timestamp offset in the muxer
18688 */
18689 ;
18690
18691 _proto.setTimestampOffset = function setTimestampOffset(data) {
18692 var timestampOffset = data.timestampOffset || 0;
18693 this.transmuxer.setBaseMediaDecodeTime(Math.round(clock.secondsToVideoTs(timestampOffset)));
18694 };
18695
18696 _proto.setAudioAppendStart = function setAudioAppendStart(data) {
18697 this.transmuxer.setAudioAppendStart(Math.ceil(clock.secondsToVideoTs(data.appendStart)));
18698 };
18699
18700 _proto.setRemux = function setRemux(data) {
18701 this.transmuxer.setRemux(data.remux);
18702 }
18703 /**
18704 * Forces the pipeline to finish processing the last segment and emit its
18705 * results.
18706 *
18707 * @param {Object} data event data, not really used
18708 */
18709 ;
18710
18711 _proto.flush = function flush(data) {
18712 this.transmuxer.flush(); // transmuxed done action is fired after both audio/video pipelines are flushed
18713
18714 self.postMessage({
18715 action: 'done',
18716 type: 'transmuxed'
18717 });
18718 };
18719
18720 _proto.endTimeline = function endTimeline() {
18721 this.transmuxer.endTimeline(); // transmuxed endedtimeline action is fired after both audio/video pipelines end their
18722 // timelines
18723
18724 self.postMessage({
18725 action: 'endedtimeline',
18726 type: 'transmuxed'
18727 });
18728 };
18729
18730 _proto.alignGopsWith = function alignGopsWith(data) {
18731 this.transmuxer.alignGopsWith(data.gopsToAlignWith.slice());
18732 };
18733
18734 return MessageHandlers;
18735 }();
18736 /**
18737 * Our web worker interface so that things can talk to mux.js
18738 * that will be running in a web worker. the scope is passed to this by
18739 * webworkify.
18740 *
18741 * @param {Object} self the scope for the web worker
18742 */
18743
18744
18745 self.onmessage = function (event) {
18746 if (event.data.action === 'init' && event.data.options) {
18747 this.messageHandlers = new MessageHandlers(self, event.data.options);
18748 return;
18749 }
18750
18751 if (!this.messageHandlers) {
18752 this.messageHandlers = new MessageHandlers(self);
18753 }
18754
18755 if (event.data && event.data.action && event.data.action !== 'init') {
18756 if (this.messageHandlers[event.data.action]) {
18757 this.messageHandlers[event.data.action](event.data);
18758 }
18759 }
18760 };
18761 }));
18762 var TransmuxWorker = factory(workerCode$1);
18763 /* rollup-plugin-worker-factory end for worker!/Users/ddashkevich/projects/http-streaming/src/transmuxer-worker.js */
18764
18765 var handleData_ = function handleData_(event, transmuxedData, callback) {
18766 var _event$data$segment = event.data.segment,
18767 type = _event$data$segment.type,
18768 initSegment = _event$data$segment.initSegment,
18769 captions = _event$data$segment.captions,
18770 captionStreams = _event$data$segment.captionStreams,
18771 metadata = _event$data$segment.metadata,
18772 videoFrameDtsTime = _event$data$segment.videoFrameDtsTime,
18773 videoFramePtsTime = _event$data$segment.videoFramePtsTime;
18774 transmuxedData.buffer.push({
18775 captions: captions,
18776 captionStreams: captionStreams,
18777 metadata: metadata
18778 });
18779 var boxes = event.data.segment.boxes || {
18780 data: event.data.segment.data
18781 };
18782 var result = {
18783 type: type,
18784 // cast ArrayBuffer to TypedArray
18785 data: new Uint8Array(boxes.data, boxes.data.byteOffset, boxes.data.byteLength),
18786 initSegment: new Uint8Array(initSegment.data, initSegment.byteOffset, initSegment.byteLength)
18787 };
18788
18789 if (typeof videoFrameDtsTime !== 'undefined') {
18790 result.videoFrameDtsTime = videoFrameDtsTime;
18791 }
18792
18793 if (typeof videoFramePtsTime !== 'undefined') {
18794 result.videoFramePtsTime = videoFramePtsTime;
18795 }
18796
18797 callback(result);
18798 };
18799 var handleDone_ = function handleDone_(_ref) {
18800 var transmuxedData = _ref.transmuxedData,
18801 callback = _ref.callback;
18802 // Previously we only returned data on data events,
18803 // not on done events. Clear out the buffer to keep that consistent.
18804 transmuxedData.buffer = []; // all buffers should have been flushed from the muxer, so start processing anything we
18805 // have received
18806
18807 callback(transmuxedData);
18808 };
18809 var handleGopInfo_ = function handleGopInfo_(event, transmuxedData) {
18810 transmuxedData.gopInfo = event.data.gopInfo;
18811 };
18812 var processTransmux = function processTransmux(options) {
18813 var transmuxer = options.transmuxer,
18814 bytes = options.bytes,
18815 audioAppendStart = options.audioAppendStart,
18816 gopsToAlignWith = options.gopsToAlignWith,
18817 remux = options.remux,
18818 onData = options.onData,
18819 onTrackInfo = options.onTrackInfo,
18820 onAudioTimingInfo = options.onAudioTimingInfo,
18821 onVideoTimingInfo = options.onVideoTimingInfo,
18822 onVideoSegmentTimingInfo = options.onVideoSegmentTimingInfo,
18823 onAudioSegmentTimingInfo = options.onAudioSegmentTimingInfo,
18824 onId3 = options.onId3,
18825 onCaptions = options.onCaptions,
18826 onDone = options.onDone,
18827 onEndedTimeline = options.onEndedTimeline,
18828 onTransmuxerLog = options.onTransmuxerLog,
18829 isEndOfTimeline = options.isEndOfTimeline;
18830 var transmuxedData = {
18831 buffer: []
18832 };
18833 var waitForEndedTimelineEvent = isEndOfTimeline;
18834
18835 var handleMessage = function handleMessage(event) {
18836 if (transmuxer.currentTransmux !== options) {
18837 // disposed
18838 return;
18839 }
18840
18841 if (event.data.action === 'data') {
18842 handleData_(event, transmuxedData, onData);
18843 }
18844
18845 if (event.data.action === 'trackinfo') {
18846 onTrackInfo(event.data.trackInfo);
18847 }
18848
18849 if (event.data.action === 'gopInfo') {
18850 handleGopInfo_(event, transmuxedData);
18851 }
18852
18853 if (event.data.action === 'audioTimingInfo') {
18854 onAudioTimingInfo(event.data.audioTimingInfo);
18855 }
18856
18857 if (event.data.action === 'videoTimingInfo') {
18858 onVideoTimingInfo(event.data.videoTimingInfo);
18859 }
18860
18861 if (event.data.action === 'videoSegmentTimingInfo') {
18862 onVideoSegmentTimingInfo(event.data.videoSegmentTimingInfo);
18863 }
18864
18865 if (event.data.action === 'audioSegmentTimingInfo') {
18866 onAudioSegmentTimingInfo(event.data.audioSegmentTimingInfo);
18867 }
18868
18869 if (event.data.action === 'id3Frame') {
18870 onId3([event.data.id3Frame], event.data.id3Frame.dispatchType);
18871 }
18872
18873 if (event.data.action === 'caption') {
18874 onCaptions(event.data.caption);
18875 }
18876
18877 if (event.data.action === 'endedtimeline') {
18878 waitForEndedTimelineEvent = false;
18879 onEndedTimeline();
18880 }
18881
18882 if (event.data.action === 'log') {
18883 onTransmuxerLog(event.data.log);
18884 } // wait for the transmuxed event since we may have audio and video
18885
18886
18887 if (event.data.type !== 'transmuxed') {
18888 return;
18889 } // If the "endedtimeline" event has not yet fired, and this segment represents the end
18890 // of a timeline, that means there may still be data events before the segment
18891 // processing can be considered complete. In that case, the final event should be
18892 // an "endedtimeline" event with the type "transmuxed."
18893
18894
18895 if (waitForEndedTimelineEvent) {
18896 return;
18897 }
18898
18899 transmuxer.onmessage = null;
18900 handleDone_({
18901 transmuxedData: transmuxedData,
18902 callback: onDone
18903 });
18904 /* eslint-disable no-use-before-define */
18905
18906 dequeue(transmuxer);
18907 /* eslint-enable */
18908 };
18909
18910 transmuxer.onmessage = handleMessage;
18911
18912 if (audioAppendStart) {
18913 transmuxer.postMessage({
18914 action: 'setAudioAppendStart',
18915 appendStart: audioAppendStart
18916 });
18917 } // allow empty arrays to be passed to clear out GOPs
18918
18919
18920 if (Array.isArray(gopsToAlignWith)) {
18921 transmuxer.postMessage({
18922 action: 'alignGopsWith',
18923 gopsToAlignWith: gopsToAlignWith
18924 });
18925 }
18926
18927 if (typeof remux !== 'undefined') {
18928 transmuxer.postMessage({
18929 action: 'setRemux',
18930 remux: remux
18931 });
18932 }
18933
18934 if (bytes.byteLength) {
18935 var buffer = bytes instanceof ArrayBuffer ? bytes : bytes.buffer;
18936 var byteOffset = bytes instanceof ArrayBuffer ? 0 : bytes.byteOffset;
18937 transmuxer.postMessage({
18938 action: 'push',
18939 // Send the typed-array of data as an ArrayBuffer so that
18940 // it can be sent as a "Transferable" and avoid the costly
18941 // memory copy
18942 data: buffer,
18943 // To recreate the original typed-array, we need information
18944 // about what portion of the ArrayBuffer it was a view into
18945 byteOffset: byteOffset,
18946 byteLength: bytes.byteLength
18947 }, [buffer]);
18948 }
18949
18950 if (isEndOfTimeline) {
18951 transmuxer.postMessage({
18952 action: 'endTimeline'
18953 });
18954 } // even if we didn't push any bytes, we have to make sure we flush in case we reached
18955 // the end of the segment
18956
18957
18958 transmuxer.postMessage({
18959 action: 'flush'
18960 });
18961 };
18962 var dequeue = function dequeue(transmuxer) {
18963 transmuxer.currentTransmux = null;
18964
18965 if (transmuxer.transmuxQueue.length) {
18966 transmuxer.currentTransmux = transmuxer.transmuxQueue.shift();
18967
18968 if (typeof transmuxer.currentTransmux === 'function') {
18969 transmuxer.currentTransmux();
18970 } else {
18971 processTransmux(transmuxer.currentTransmux);
18972 }
18973 }
18974 };
18975 var processAction = function processAction(transmuxer, action) {
18976 transmuxer.postMessage({
18977 action: action
18978 });
18979 dequeue(transmuxer);
18980 };
18981 var enqueueAction = function enqueueAction(action, transmuxer) {
18982 if (!transmuxer.currentTransmux) {
18983 transmuxer.currentTransmux = action;
18984 processAction(transmuxer, action);
18985 return;
18986 }
18987
18988 transmuxer.transmuxQueue.push(processAction.bind(null, transmuxer, action));
18989 };
18990 var reset = function reset(transmuxer) {
18991 enqueueAction('reset', transmuxer);
18992 };
18993 var endTimeline = function endTimeline(transmuxer) {
18994 enqueueAction('endTimeline', transmuxer);
18995 };
18996 var transmux = function transmux(options) {
18997 if (!options.transmuxer.currentTransmux) {
18998 options.transmuxer.currentTransmux = options;
18999 processTransmux(options);
19000 return;
19001 }
19002
19003 options.transmuxer.transmuxQueue.push(options);
19004 };
19005 var createTransmuxer = function createTransmuxer(options) {
19006 var transmuxer = new TransmuxWorker();
19007 transmuxer.currentTransmux = null;
19008 transmuxer.transmuxQueue = [];
19009 var term = transmuxer.terminate;
19010
19011 transmuxer.terminate = function () {
19012 transmuxer.currentTransmux = null;
19013 transmuxer.transmuxQueue.length = 0;
19014 return term.call(transmuxer);
19015 };
19016
19017 transmuxer.postMessage({
19018 action: 'init',
19019 options: options
19020 });
19021 return transmuxer;
19022 };
19023 var segmentTransmuxer = {
19024 reset: reset,
19025 endTimeline: endTimeline,
19026 transmux: transmux,
19027 createTransmuxer: createTransmuxer
19028 };
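  // A minimal usage sketch of the queue API above (the option values and
  // callback bodies are assumptions, not from this bundle). `transmux` runs
  // immediately when the worker is idle; otherwise it waits in `transmuxQueue`:
  //
  //   var transmuxer = segmentTransmuxer.createTransmuxer({ remux: false });
  //   segmentTransmuxer.transmux({
  //     transmuxer: transmuxer,
  //     bytes: tsSegmentBytes, // assumed Uint8Array of TS segment data
  //     audioAppendStart: null,
  //     gopsToAlignWith: null,
  //     isEndOfTimeline: false,
  //     onData: function (result) { /* fmp4 data ready for a SourceBuffer */ },
  //     onTrackInfo: function (info) {},
  //     onAudioTimingInfo: function (info) {},
  //     onVideoTimingInfo: function (info) {},
  //     onVideoSegmentTimingInfo: function (info) {},
  //     onAudioSegmentTimingInfo: function (info) {},
  //     onId3: function (frames, dispatchType) {},
  //     onCaptions: function (captions) {},
  //     onEndedTimeline: function () {},
  //     onTransmuxerLog: function (log) {},
  //     onDone: function (result) { transmuxer.terminate(); }
  //   });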
19029
19030 var workerCallback = function workerCallback(options) {
19031 var transmuxer = options.transmuxer;
19032 var endAction = options.endAction || options.action;
19033 var callback = options.callback;
19034
19035 var message = _extends_1({}, options, {
19036 endAction: null,
19037 transmuxer: null,
19038 callback: null
19039 });
19040
19041 var listenForEndEvent = function listenForEndEvent(event) {
19042 if (event.data.action !== endAction) {
19043 return;
19044 }
19045
19046 transmuxer.removeEventListener('message', listenForEndEvent); // transfer ownership of bytes back to us.
19047
19048 if (event.data.data) {
19049 event.data.data = new Uint8Array(event.data.data, options.byteOffset || 0, options.byteLength || event.data.data.byteLength);
19050
19051 if (options.data) {
19052 options.data = event.data.data;
19053 }
19054 }
19055
19056 callback(event.data);
19057 };
19058
19059 transmuxer.addEventListener('message', listenForEndEvent);
19060
19061 if (options.data) {
19062 var isArrayBuffer = options.data instanceof ArrayBuffer;
19063 message.byteOffset = isArrayBuffer ? 0 : options.data.byteOffset;
19064 message.byteLength = options.data.byteLength;
19065 var transfers = [isArrayBuffer ? options.data : options.data.buffer];
19066 transmuxer.postMessage(message, transfers);
19067 } else {
19068 transmuxer.postMessage(message);
19069 }
19070 };
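  // A sketch of the request/response pattern `workerCallback` implements,
  // mirroring how it is called later in this file (`segment` is an assumed
  // simplified segment object):
  //
  //   workerCallback({
  //     action: 'probeMp4StartTime',    // message posted to the worker
  //     endAction: 'probeMp4StartTime', // defaults to `action` when omitted
  //     transmuxer: segment.transmuxer,
  //     data: segment.bytes,            // transferred to the worker...
  //     timescales: segment.map.timescales,
  //     callback: function (message) {
  //       segment.bytes = message.data; // ...and transferred back here
  //     }
  //   });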
19071
19072 var REQUEST_ERRORS = {
19073 FAILURE: 2,
19074 TIMEOUT: -101,
19075 ABORTED: -102
19076 };
19077 /**
19078 * Abort all requests
19079 *
19080 * @param {Object[]} activeXhrs - an array tracking all active XHR requests
19081 */
19082
19083 var abortAll = function abortAll(activeXhrs) {
19084 activeXhrs.forEach(function (xhr) {
19085 xhr.abort();
19086 });
19087 };
19088 /**
19089 * Gather important bandwidth stats once a request has completed
19090 *
19091 * @param {Object} request - the XHR request from which to gather stats
19092 */
19093
19094
19095 var getRequestStats = function getRequestStats(request) {
19096 return {
19097 bandwidth: request.bandwidth,
19098 bytesReceived: request.bytesReceived || 0,
19099 roundTripTime: request.roundTripTime || 0
19100 };
19101 };
19102 /**
19103 * If possible gather bandwidth stats as a request is in
19104 * progress
19105 *
19106 * @param {Event} progressEvent - an event object from an XHR's progress event
19107 */
19108
19109
19110 var getProgressStats = function getProgressStats(progressEvent) {
19111 var request = progressEvent.target;
19112 var roundTripTime = Date.now() - request.requestTime;
19113 var stats = {
19114 bandwidth: Infinity,
19115 bytesReceived: 0,
19116 roundTripTime: roundTripTime || 0
19117 };
19118 stats.bytesReceived = progressEvent.loaded; // This can result in Infinity if stats.roundTripTime is 0 but that is ok
19119 // because we should only use bandwidth stats on progress to determine when
19120 // to abort a request early due to insufficient bandwidth
19121
19122 stats.bandwidth = Math.floor(stats.bytesReceived / stats.roundTripTime * 8 * 1000);
19123 return stats;
19124 };
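  // Worked example of the bandwidth math above: 500,000 bytes received over a
  // 2,000ms round trip yields Math.floor(500000 / 2000 * 8 * 1000), i.e.
  // 2,000,000 bits per second.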
19125 /**
19126 * Handle all error conditions in one place and return an object
19127 * with all the information
19128 *
19129 * @param {Error|null} error - if non-null signals an error occurred with the XHR
19130 * @param {Object} request - the XHR request that possibly generated the error
19131 */
19132
19133
19134 var handleErrors = function handleErrors(error, request) {
19135 if (request.timedout) {
19136 return {
19137 status: request.status,
19138 message: 'HLS request timed-out at URL: ' + request.uri,
19139 code: REQUEST_ERRORS.TIMEOUT,
19140 xhr: request
19141 };
19142 }
19143
19144 if (request.aborted) {
19145 return {
19146 status: request.status,
19147 message: 'HLS request aborted at URL: ' + request.uri,
19148 code: REQUEST_ERRORS.ABORTED,
19149 xhr: request
19150 };
19151 }
19152
19153 if (error) {
19154 return {
19155 status: request.status,
19156 message: 'HLS request errored at URL: ' + request.uri,
19157 code: REQUEST_ERRORS.FAILURE,
19158 xhr: request
19159 };
19160 }
19161
19162 if (request.responseType === 'arraybuffer' && request.response.byteLength === 0) {
19163 return {
19164 status: request.status,
19165 message: 'Empty HLS response at URL: ' + request.uri,
19166 code: REQUEST_ERRORS.FAILURE,
19167 xhr: request
19168 };
19169 }
19170
19171 return null;
19172 };
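  // Callers typically branch on the `code` of the returned error object; a
  // hypothetical sketch (the retry/blacklist behavior lives elsewhere):
  //
  //   var errorObj = handleErrors(error, request);
  //   if (errorObj && errorObj.code === REQUEST_ERRORS.ABORTED) {
  //     // an intentional abort, usually safe to ignore
  //   } else if (errorObj) {
  //     // TIMEOUT or FAILURE: surface errorObj.message, then retry or blacklist
  //   }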
19173 /**
19174 * Handle responses for key data and convert the key data to the correct format
19175 * for the decryption step later
19176 *
19177 * @param {Object} segment - a simplified copy of the segmentInfo object
19178 * from SegmentLoader
19179 * @param {Array} objects - objects to add the key bytes to.
19180 * @param {Function} finishProcessingFn - a callback to execute to continue processing
19181 * this request
19182 */
19183
19184
19185 var handleKeyResponse = function handleKeyResponse(segment, objects, finishProcessingFn) {
19186 return function (error, request) {
19187 var response = request.response;
19188 var errorObj = handleErrors(error, request);
19189
19190 if (errorObj) {
19191 return finishProcessingFn(errorObj, segment);
19192 }
19193
19194 if (response.byteLength !== 16) {
19195 return finishProcessingFn({
19196 status: request.status,
19197 message: 'Invalid HLS key at URL: ' + request.uri,
19198 code: REQUEST_ERRORS.FAILURE,
19199 xhr: request
19200 }, segment);
19201 }
19202
19203 var view = new DataView(response);
19204 var bytes = new Uint32Array([view.getUint32(0), view.getUint32(4), view.getUint32(8), view.getUint32(12)]);
19205
19206 for (var i = 0; i < objects.length; i++) {
19207 objects[i].bytes = bytes;
19208 }
19209
19210 return finishProcessingFn(null, segment);
19211 };
19212 };
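  // Worked example of the key conversion above: a 16-byte response with bytes
  // 00 01 02 03 ... 0e 0f becomes (via big-endian DataView reads)
  // Uint32Array [0x00010203, 0x04050607, 0x08090a0b, 0x0c0d0e0f],
  // the 32-bit word layout consumed by the AES-128 decryption routines.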
19213
19214 var parseInitSegment = function parseInitSegment(segment, _callback) {
19215 var type = detectContainerForBytes(segment.map.bytes); // TODO: We should also handle ts init segments here, but we
19216 // only know how to parse mp4 init segments at the moment
19217
19218 if (type !== 'mp4') {
19219 var uri = segment.map.resolvedUri || segment.map.uri;
19220 return _callback({
19221 internal: true,
19222 message: "Found unsupported " + (type || 'unknown') + " container for initialization segment at URL: " + uri,
19223 code: REQUEST_ERRORS.FAILURE
19224 });
19225 }
19226
19227 workerCallback({
19228 action: 'probeMp4Tracks',
19229 data: segment.map.bytes,
19230 transmuxer: segment.transmuxer,
19231 callback: function callback(_ref) {
19232 var tracks = _ref.tracks,
19233 data = _ref.data;
19234 // transfer bytes back to us
19235 segment.map.bytes = data;
19236 tracks.forEach(function (track) {
19237 segment.map.tracks = segment.map.tracks || {}; // only support one track of each type for now
19238
19239 if (segment.map.tracks[track.type]) {
19240 return;
19241 }
19242
19243 segment.map.tracks[track.type] = track;
19244
19245 if (typeof track.id === 'number' && track.timescale) {
19246 segment.map.timescales = segment.map.timescales || {};
19247 segment.map.timescales[track.id] = track.timescale;
19248 }
19249 });
19250 return _callback(null);
19251 }
19252 });
19253 };
19254 /**
19255 * Handle init-segment responses
19256 *
19257 * @param {Object} segment - a simplified copy of the segmentInfo object
19258 * from SegmentLoader
19259 * @param {Function} finishProcessingFn - a callback to execute to continue processing
19260 * this request
19261 */
19262
19263
19264 var handleInitSegmentResponse = function handleInitSegmentResponse(_ref2) {
19265 var segment = _ref2.segment,
19266 finishProcessingFn = _ref2.finishProcessingFn;
19267 return function (error, request) {
19268 var errorObj = handleErrors(error, request);
19269
19270 if (errorObj) {
19271 return finishProcessingFn(errorObj, segment);
19272 }
19273
19274 var bytes = new Uint8Array(request.response); // init segment is encrypted, we will have to wait
19275 // until the key request is done to decrypt.
19276
19277 if (segment.map.key) {
19278 segment.map.encryptedBytes = bytes;
19279 return finishProcessingFn(null, segment);
19280 }
19281
19282 segment.map.bytes = bytes;
19283 parseInitSegment(segment, function (parseError) {
19284 if (parseError) {
19285 parseError.xhr = request;
19286 parseError.status = request.status;
19287 return finishProcessingFn(parseError, segment);
19288 }
19289
19290 finishProcessingFn(null, segment);
19291 });
19292 };
19293 };
19294 /**
19295 * Response handler for segment requests, ensuring the correct property is
19296 * set depending on whether the segment is encrypted or not.
19297 * Also records stats that are used for ABR purposes
19298 *
19299 * @param {Object} segment - a simplified copy of the segmentInfo object
19300 * from SegmentLoader
19301 * @param {Function} finishProcessingFn - a callback to execute to continue processing
19302 * this request
19303 */
19304
19305
19306 var handleSegmentResponse = function handleSegmentResponse(_ref3) {
19307 var segment = _ref3.segment,
19308 finishProcessingFn = _ref3.finishProcessingFn,
19309 responseType = _ref3.responseType;
19310 return function (error, request) {
19311 var errorObj = handleErrors(error, request);
19312
19313 if (errorObj) {
19314 return finishProcessingFn(errorObj, segment);
19315 }
19316
19317 var newBytes = // although responseText "should" exist, this guard serves to prevent an error being
19318 // thrown for two primary cases:
19319 // 1. the mime type override stops working, or is not implemented for a specific
19320 // browser
19321 // 2. when using mock XHR libraries like sinon that do not allow the override behavior
19322 responseType === 'arraybuffer' || !request.responseText ? request.response : stringToArrayBuffer(request.responseText.substring(segment.lastReachedChar || 0));
19323 segment.stats = getRequestStats(request);
19324
19325 if (segment.key) {
19326 segment.encryptedBytes = new Uint8Array(newBytes);
19327 } else {
19328 segment.bytes = new Uint8Array(newBytes);
19329 }
19330
19331 return finishProcessingFn(null, segment);
19332 };
19333 };
19334
19335 var transmuxAndNotify = function transmuxAndNotify(_ref4) {
19336 var segment = _ref4.segment,
19337 bytes = _ref4.bytes,
19338 trackInfoFn = _ref4.trackInfoFn,
19339 timingInfoFn = _ref4.timingInfoFn,
19340 videoSegmentTimingInfoFn = _ref4.videoSegmentTimingInfoFn,
19341 audioSegmentTimingInfoFn = _ref4.audioSegmentTimingInfoFn,
19342 id3Fn = _ref4.id3Fn,
19343 captionsFn = _ref4.captionsFn,
19344 isEndOfTimeline = _ref4.isEndOfTimeline,
19345 endedTimelineFn = _ref4.endedTimelineFn,
19346 dataFn = _ref4.dataFn,
19347 doneFn = _ref4.doneFn,
19348 onTransmuxerLog = _ref4.onTransmuxerLog;
19349 var fmp4Tracks = segment.map && segment.map.tracks || {};
19350 var isMuxed = Boolean(fmp4Tracks.audio && fmp4Tracks.video); // Keep references to each function so we can null them out after we're done with them.
19351 // One reason for this is that in the case of full segments, we want to trust start
19352 // times from the probe, rather than the transmuxer.
19353
19354 var audioStartFn = timingInfoFn.bind(null, segment, 'audio', 'start');
19355 var audioEndFn = timingInfoFn.bind(null, segment, 'audio', 'end');
19356 var videoStartFn = timingInfoFn.bind(null, segment, 'video', 'start');
19357 var videoEndFn = timingInfoFn.bind(null, segment, 'video', 'end');
19358
19359 var finish = function finish() {
19360 return transmux({
19361 bytes: bytes,
19362 transmuxer: segment.transmuxer,
19363 audioAppendStart: segment.audioAppendStart,
19364 gopsToAlignWith: segment.gopsToAlignWith,
19365 remux: isMuxed,
19366 onData: function onData(result) {
19367 result.type = result.type === 'combined' ? 'video' : result.type;
19368 dataFn(segment, result);
19369 },
19370 onTrackInfo: function onTrackInfo(trackInfo) {
19371 if (trackInfoFn) {
19372 if (isMuxed) {
19373 trackInfo.isMuxed = true;
19374 }
19375
19376 trackInfoFn(segment, trackInfo);
19377 }
19378 },
19379 onAudioTimingInfo: function onAudioTimingInfo(audioTimingInfo) {
19380 // we only want the first start value we encounter
19381 if (audioStartFn && typeof audioTimingInfo.start !== 'undefined') {
19382 audioStartFn(audioTimingInfo.start);
19383 audioStartFn = null;
19384 } // we want to continually update the end time
19385
19386
19387 if (audioEndFn && typeof audioTimingInfo.end !== 'undefined') {
19388 audioEndFn(audioTimingInfo.end);
19389 }
19390 },
19391 onVideoTimingInfo: function onVideoTimingInfo(videoTimingInfo) {
19392 // we only want the first start value we encounter
19393 if (videoStartFn && typeof videoTimingInfo.start !== 'undefined') {
19394 videoStartFn(videoTimingInfo.start);
19395 videoStartFn = null;
19396 } // we want to continually update the end time
19397
19398
19399 if (videoEndFn && typeof videoTimingInfo.end !== 'undefined') {
19400 videoEndFn(videoTimingInfo.end);
19401 }
19402 },
19403 onVideoSegmentTimingInfo: function onVideoSegmentTimingInfo(videoSegmentTimingInfo) {
19404 videoSegmentTimingInfoFn(videoSegmentTimingInfo);
19405 },
19406 onAudioSegmentTimingInfo: function onAudioSegmentTimingInfo(audioSegmentTimingInfo) {
19407 audioSegmentTimingInfoFn(audioSegmentTimingInfo);
19408 },
19409 onId3: function onId3(id3Frames, dispatchType) {
19410 id3Fn(segment, id3Frames, dispatchType);
19411 },
19412 onCaptions: function onCaptions(captions) {
19413 captionsFn(segment, [captions]);
19414 },
19415 isEndOfTimeline: isEndOfTimeline,
19416 onEndedTimeline: function onEndedTimeline() {
19417 endedTimelineFn();
19418 },
19419 onTransmuxerLog: onTransmuxerLog,
19420 onDone: function onDone(result) {
19421 if (!doneFn) {
19422 return;
19423 }
19424
19425 result.type = result.type === 'combined' ? 'video' : result.type;
19426 doneFn(null, segment, result);
19427 }
19428 });
19429 }; // In the transmuxer, we don't yet have the ability to extract a "proper" start time,
19430 // meaning cached frame data may corrupt our notion of where this segment
19431 // really starts. To get around this, probe for the info needed.
19432
19433
19434 workerCallback({
19435 action: 'probeTs',
19436 transmuxer: segment.transmuxer,
19437 data: bytes,
19438 baseStartTime: segment.baseStartTime,
19439 callback: function callback(data) {
19440 segment.bytes = bytes = data.data;
19441 var probeResult = data.result;
19442
19443 if (probeResult) {
19444 trackInfoFn(segment, {
19445 hasAudio: probeResult.hasAudio,
19446 hasVideo: probeResult.hasVideo,
19447 isMuxed: isMuxed
19448 });
19449 trackInfoFn = null;
19450
19451 if (probeResult.hasAudio && !isMuxed) {
19452 audioStartFn(probeResult.audioStart);
19453 }
19454
19455 if (probeResult.hasVideo) {
19456 videoStartFn(probeResult.videoStart);
19457 }
19458
19459 audioStartFn = null;
19460 videoStartFn = null;
19461 }
19462
19463 finish();
19464 }
19465 });
19466 };
19467
19468 var handleSegmentBytes = function handleSegmentBytes(_ref5) {
19469 var segment = _ref5.segment,
19470 bytes = _ref5.bytes,
19471 trackInfoFn = _ref5.trackInfoFn,
19472 timingInfoFn = _ref5.timingInfoFn,
19473 videoSegmentTimingInfoFn = _ref5.videoSegmentTimingInfoFn,
19474 audioSegmentTimingInfoFn = _ref5.audioSegmentTimingInfoFn,
19475 id3Fn = _ref5.id3Fn,
19476 captionsFn = _ref5.captionsFn,
19477 isEndOfTimeline = _ref5.isEndOfTimeline,
19478 endedTimelineFn = _ref5.endedTimelineFn,
19479 dataFn = _ref5.dataFn,
19480 doneFn = _ref5.doneFn,
19481 onTransmuxerLog = _ref5.onTransmuxerLog;
19482 var bytesAsUint8Array = new Uint8Array(bytes); // TODO:
19483 // We should have a handler that fetches the number of bytes required
19484 // to check if something is fmp4. This will allow us to save bandwidth
19485 // because we can only blacklist a playlist and abort requests
19486 // by codec after trackinfo triggers.
19487
19488 if (isLikelyFmp4MediaSegment(bytesAsUint8Array)) {
19489 segment.isFmp4 = true;
19490 var tracks = segment.map.tracks;
19491 var trackInfo = {
19492 isFmp4: true,
19493 hasVideo: !!tracks.video,
19494 hasAudio: !!tracks.audio
19495 }; // if we have an audio track with a codec that is not set to
19496 // encrypted audio
19497
19498 if (tracks.audio && tracks.audio.codec && tracks.audio.codec !== 'enca') {
19499 trackInfo.audioCodec = tracks.audio.codec;
19500 } // if we have a video track with a codec that is not set to
19501 // encrypted video
19502
19503
19504 if (tracks.video && tracks.video.codec && tracks.video.codec !== 'encv') {
19505 trackInfo.videoCodec = tracks.video.codec;
19506 }
19507
19508 if (tracks.video && tracks.audio) {
19509 trackInfo.isMuxed = true;
19510 } // since we don't support appending fmp4 data on progress, we know we have the full
19511 // segment here
19512
19513
19514 trackInfoFn(segment, trackInfo); // The probe doesn't provide the segment end time, so only callback with the start
19515 // time. The end time can be roughly calculated by the receiver using the duration.
19516 //
19517 // Note that the start time returned by the probe reflects the baseMediaDecodeTime, as
19518 // that is the true start of the segment (where the playback engine should begin
19519 // decoding).
19520
19521 var finishLoading = function finishLoading(captions) {
19522 // if the track still has audio at this point it is only possible
19523 // for it to be audio only. See `tracks.video && tracks.audio` if statement
19524 // above.
19525 // we make sure to use segment.bytes here as that reflects the bytes transferred back to us from the worker
19526 dataFn(segment, {
19527 data: bytesAsUint8Array,
19528 type: trackInfo.hasAudio && !trackInfo.isMuxed ? 'audio' : 'video'
19529 });
19530
19531 if (captions && captions.length) {
19532 captionsFn(segment, captions);
19533 }
19534
19535 doneFn(null, segment, {});
19536 };
19537
19538 workerCallback({
19539 action: 'probeMp4StartTime',
19540 timescales: segment.map.timescales,
19541 data: bytesAsUint8Array,
19542 transmuxer: segment.transmuxer,
19543 callback: function callback(_ref6) {
19544 var data = _ref6.data,
19545 startTime = _ref6.startTime;
19546 // transfer bytes back to us
19547 bytes = data.buffer;
19548 segment.bytes = bytesAsUint8Array = data;
19549
19550 if (trackInfo.hasAudio && !trackInfo.isMuxed) {
19551 timingInfoFn(segment, 'audio', 'start', startTime);
19552 }
19553
19554 if (trackInfo.hasVideo) {
19555 timingInfoFn(segment, 'video', 'start', startTime);
19556 } // Run through the CaptionParser in case there are captions.
19557 // Initialize CaptionParser if it hasn't been yet
19558
19559
19560 if (!tracks.video || !data.byteLength || !segment.transmuxer) {
19561 finishLoading();
19562 return;
19563 }
19564
19565 workerCallback({
19566 action: 'pushMp4Captions',
19567 endAction: 'mp4Captions',
19568 transmuxer: segment.transmuxer,
19569 data: bytesAsUint8Array,
19570 timescales: segment.map.timescales,
19571 trackIds: [tracks.video.id],
19572 callback: function callback(message) {
19573 // transfer bytes back to us
19574 bytes = message.data.buffer;
19575 segment.bytes = bytesAsUint8Array = message.data;
19576 message.logs.forEach(function (log) {
19577 onTransmuxerLog(videojs__default["default"].mergeOptions(log, {
19578 stream: 'mp4CaptionParser'
19579 }));
19580 });
19581 finishLoading(message.captions);
19582 }
19583 });
19584 }
19585 });
19586 return;
19587 } // VTT or other segments that don't need processing
19588
19589
19590 if (!segment.transmuxer) {
19591 doneFn(null, segment, {});
19592 return;
19593 }
19594
19595 if (typeof segment.container === 'undefined') {
19596 segment.container = detectContainerForBytes(bytesAsUint8Array);
19597 }
19598
19599 if (segment.container !== 'ts' && segment.container !== 'aac') {
19600 trackInfoFn(segment, {
19601 hasAudio: false,
19602 hasVideo: false
19603 });
19604 doneFn(null, segment, {});
19605 return;
19606 } // ts or aac
19607
19608
19609 transmuxAndNotify({
19610 segment: segment,
19611 bytes: bytes,
19612 trackInfoFn: trackInfoFn,
19613 timingInfoFn: timingInfoFn,
19614 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
19615 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
19616 id3Fn: id3Fn,
19617 captionsFn: captionsFn,
19618 isEndOfTimeline: isEndOfTimeline,
19619 endedTimelineFn: endedTimelineFn,
19620 dataFn: dataFn,
19621 doneFn: doneFn,
19622 onTransmuxerLog: onTransmuxerLog
19623 });
19624 };
19625
19626 var decrypt = function decrypt(_ref7, callback) {
19627 var id = _ref7.id,
19628 key = _ref7.key,
19629 encryptedBytes = _ref7.encryptedBytes,
19630 decryptionWorker = _ref7.decryptionWorker;
19631
19632 var decryptionHandler = function decryptionHandler(event) {
19633 if (event.data.source === id) {
19634 decryptionWorker.removeEventListener('message', decryptionHandler);
19635 var decrypted = event.data.decrypted;
19636 callback(new Uint8Array(decrypted.bytes, decrypted.byteOffset, decrypted.byteLength));
19637 }
19638 };
19639
19640 decryptionWorker.addEventListener('message', decryptionHandler);
19641 var keyBytes;
19642
19643 if (key.bytes.slice) {
19644 keyBytes = key.bytes.slice();
19645 } else {
19646 keyBytes = new Uint32Array(Array.prototype.slice.call(key.bytes));
19647 } // incrementally decrypt the bytes
19648
19649
19650 decryptionWorker.postMessage(createTransferableMessage({
19651 source: id,
19652 encrypted: encryptedBytes,
19653 key: keyBytes,
19654 iv: key.iv
19655 }), [encryptedBytes.buffer, keyBytes.buffer]);
19656 };
19657 /**
19658 * Decrypt the segment via the decryption web worker
19659 *
19660 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
19661 * routines
19662 * @param {Object} segment - a simplified copy of the segmentInfo object
19663 * from SegmentLoader
19664 * @param {Function} trackInfoFn - a callback that receives track info
19665 * @param {Function} timingInfoFn - a callback that receives timing info
19666 * @param {Function} videoSegmentTimingInfoFn
19667 * a callback that receives video timing info based on media times and
19668 * any adjustments made by the transmuxer
19669 * @param {Function} audioSegmentTimingInfoFn
19670 * a callback that receives audio timing info based on media times and
19671 * any adjustments made by the transmuxer
19672 * @param {boolean} isEndOfTimeline
19673 * true if this segment represents the last segment in a timeline
19674 * @param {Function} endedTimelineFn
19675 * a callback made when a timeline is ended, will only be called if
19676 * isEndOfTimeline is true
19677 * @param {Function} dataFn - a callback that is executed when segment bytes are available
19678 * and ready to use
19679 * @param {Function} doneFn - a callback that is executed after decryption has completed
19680 */
19681
19682
19683 var decryptSegment = function decryptSegment(_ref8) {
19684 var decryptionWorker = _ref8.decryptionWorker,
19685 segment = _ref8.segment,
19686 trackInfoFn = _ref8.trackInfoFn,
19687 timingInfoFn = _ref8.timingInfoFn,
19688 videoSegmentTimingInfoFn = _ref8.videoSegmentTimingInfoFn,
19689 audioSegmentTimingInfoFn = _ref8.audioSegmentTimingInfoFn,
19690 id3Fn = _ref8.id3Fn,
19691 captionsFn = _ref8.captionsFn,
19692 isEndOfTimeline = _ref8.isEndOfTimeline,
19693 endedTimelineFn = _ref8.endedTimelineFn,
19694 dataFn = _ref8.dataFn,
19695 doneFn = _ref8.doneFn,
19696 onTransmuxerLog = _ref8.onTransmuxerLog;
19697 decrypt({
19698 id: segment.requestId,
19699 key: segment.key,
19700 encryptedBytes: segment.encryptedBytes,
19701 decryptionWorker: decryptionWorker
19702 }, function (decryptedBytes) {
19703 segment.bytes = decryptedBytes;
19704 handleSegmentBytes({
19705 segment: segment,
19706 bytes: segment.bytes,
19707 trackInfoFn: trackInfoFn,
19708 timingInfoFn: timingInfoFn,
19709 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
19710 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
19711 id3Fn: id3Fn,
19712 captionsFn: captionsFn,
19713 isEndOfTimeline: isEndOfTimeline,
19714 endedTimelineFn: endedTimelineFn,
19715 dataFn: dataFn,
19716 doneFn: doneFn,
19717 onTransmuxerLog: onTransmuxerLog
19718 });
19719 });
19720 };
19721 /**
19722 * This function waits for all XHRs to finish (with either success or failure)
19723 * before continuing processing via its callback. The function gathers errors
19724 * from each request into a single errors array so that the error status for
19725 * each request can be examined later.
19726 *
19727 * @param {Object[]} activeXhrs - an array tracking all active XHR requests
19728 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
19729 * routines
19730 * @param {Function} trackInfoFn - a callback that receives track info
19731 * @param {Function} timingInfoFn - a callback that receives timing info
19732 * @param {Function} videoSegmentTimingInfoFn
19733 * a callback that receives video timing info based on media times and
19734 * any adjustments made by the transmuxer
19735 * @param {Function} audioSegmentTimingInfoFn
19736 * a callback that receives audio timing info based on media times and
19737 * any adjustments made by the transmuxer
19738 * @param {Function} id3Fn - a callback that receives ID3 metadata
19739 * @param {Function} captionsFn - a callback that receives captions
19740 * @param {boolean} isEndOfTimeline
19741 * true if this segment represents the last segment in a timeline
19742 * @param {Function} endedTimelineFn
19743 * a callback made when a timeline is ended, will only be called if
19744 * isEndOfTimeline is true
19745 * @param {Function} dataFn - a callback that is executed when segment bytes are available
19746 * and ready to use
19747 * @param {Function} doneFn - a callback that is executed after all resources have been
19748 * downloaded and any decryption completed
19749 */
19750
19751
19752 var waitForCompletion = function waitForCompletion(_ref9) {
19753 var activeXhrs = _ref9.activeXhrs,
19754 decryptionWorker = _ref9.decryptionWorker,
19755 trackInfoFn = _ref9.trackInfoFn,
19756 timingInfoFn = _ref9.timingInfoFn,
19757 videoSegmentTimingInfoFn = _ref9.videoSegmentTimingInfoFn,
19758 audioSegmentTimingInfoFn = _ref9.audioSegmentTimingInfoFn,
19759 id3Fn = _ref9.id3Fn,
19760 captionsFn = _ref9.captionsFn,
19761 isEndOfTimeline = _ref9.isEndOfTimeline,
19762 endedTimelineFn = _ref9.endedTimelineFn,
19763 dataFn = _ref9.dataFn,
19764 doneFn = _ref9.doneFn,
19765 onTransmuxerLog = _ref9.onTransmuxerLog;
19766 var count = 0;
19767 var didError = false;
19768 return function (error, segment) {
19769 if (didError) {
19770 return;
19771 }
19772
19773 if (error) {
19774 didError = true; // If there are errors, we have to abort any outstanding requests
19775
19776 abortAll(activeXhrs); // Even though the requests above are aborted, and in theory we could wait until we
19777 // handle the aborted events from those requests, there are some cases where we may
19778 // never get an aborted event. For instance, if the network connection is lost and
19779 // there were two requests, the first may have triggered an error immediately, while
19780 // the second request remains unsent. In that case, the aborted algorithm will not
19781 // trigger an abort: see https://xhr.spec.whatwg.org/#the-abort()-method
19782 //
19783 // We also can't rely on the ready state of the XHR, since the request that
19784 // triggered the connection error may also show as a ready state of 0 (unsent).
19785 // Therefore, we have to finish this group of requests immediately after the first
19786 // seen error.
19787
19788 return doneFn(error, segment);
19789 }
19790
19791 count += 1;
19792
19793 if (count === activeXhrs.length) {
19794 var segmentFinish = function segmentFinish() {
19795 if (segment.encryptedBytes) {
19796 return decryptSegment({
19797 decryptionWorker: decryptionWorker,
19798 segment: segment,
19799 trackInfoFn: trackInfoFn,
19800 timingInfoFn: timingInfoFn,
19801 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
19802 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
19803 id3Fn: id3Fn,
19804 captionsFn: captionsFn,
19805 isEndOfTimeline: isEndOfTimeline,
19806 endedTimelineFn: endedTimelineFn,
19807 dataFn: dataFn,
19808 doneFn: doneFn,
19809 onTransmuxerLog: onTransmuxerLog
19810 });
19811 } // Otherwise, everything is ready; just continue
19812
19813
19814 handleSegmentBytes({
19815 segment: segment,
19816 bytes: segment.bytes,
19817 trackInfoFn: trackInfoFn,
19818 timingInfoFn: timingInfoFn,
19819 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
19820 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
19821 id3Fn: id3Fn,
19822 captionsFn: captionsFn,
19823 isEndOfTimeline: isEndOfTimeline,
19824 endedTimelineFn: endedTimelineFn,
19825 dataFn: dataFn,
19826 doneFn: doneFn,
19827 onTransmuxerLog: onTransmuxerLog
19828 });
19829 }; // Keep track of when *all* of the requests have completed
19830
19831
19832 segment.endOfAllRequests = Date.now();
19833
19834 if (segment.map && segment.map.encryptedBytes && !segment.map.bytes) {
19835 return decrypt({
19836 decryptionWorker: decryptionWorker,
19837 // add -init to the "id" to differentiate between segment
19838 // and init segment decryption, just in case they happen
19839 // at the same time at some point in the future.
19840 id: segment.requestId + '-init',
19841 encryptedBytes: segment.map.encryptedBytes,
19842 key: segment.map.key
19843 }, function (decryptedBytes) {
19844 segment.map.bytes = decryptedBytes;
19845 parseInitSegment(segment, function (parseError) {
19846 if (parseError) {
19847 abortAll(activeXhrs);
19848 return doneFn(parseError, segment);
19849 }
19850
19851 segmentFinish();
19852 });
19853 });
19854 }
19855
19856 segmentFinish();
19857 }
19858 };
19859 };
19860 /**
19861 * Calls the abort callback if any request within the batch was aborted. Will only call
19862 * the callback once per batch of requests, even if multiple were aborted.
19863 *
19864 * @param {Object} loadendState - state to check to see if the abort function was called
19865 * @param {Function} abortFn - callback to call for abort
19866 */
19867
19868
19869 var handleLoadEnd = function handleLoadEnd(_ref10) {
19870 var loadendState = _ref10.loadendState,
19871 abortFn = _ref10.abortFn;
19872 return function (event) {
19873 var request = event.target;
19874
19875 if (request.aborted && abortFn && !loadendState.calledAbortFn) {
19876 abortFn();
19877 loadendState.calledAbortFn = true;
19878 }
19879 };
19880 };
19881 /**
19882 * Simple progress event callback handler that gathers some stats before
19883 * executing a provided callback with the `segment` object
19884 *
19885 * @param {Object} segment - a simplified copy of the segmentInfo object
19886 * from SegmentLoader
19887 * @param {Function} progressFn - a callback that is executed each time a progress event
19888 * is received
19889 * @param {Function} trackInfoFn - a callback that receives track info
19890 * @param {Function} timingInfoFn - a callback that receives timing info
19891 * @param {Function} videoSegmentTimingInfoFn
19892 * a callback that receives video timing info based on media times and
19893 * any adjustments made by the transmuxer
19894 * @param {Function} audioSegmentTimingInfoFn
19895 * a callback that receives audio timing info based on media times and
19896 * any adjustments made by the transmuxer
19897 * @param {boolean} isEndOfTimeline
19898 * true if this segment represents the last segment in a timeline
19899 * @param {Function} endedTimelineFn
19900 * a callback made when a timeline is ended, will only be called if
19901 * isEndOfTimeline is true
19902 * @param {Function} dataFn - a callback that is executed when segment bytes are available
19903 * and ready to use
19904 * @param {Event} event - the progress event object from XMLHttpRequest
19905 */
19906
19907
19908 var handleProgress = function handleProgress(_ref11) {
19909 var segment = _ref11.segment,
19910 progressFn = _ref11.progressFn;
19911 _ref11.trackInfoFn;
19912 _ref11.timingInfoFn;
19913 _ref11.videoSegmentTimingInfoFn;
19914 _ref11.audioSegmentTimingInfoFn;
19915 _ref11.id3Fn;
19916 _ref11.captionsFn;
19917 _ref11.isEndOfTimeline;
19918 _ref11.endedTimelineFn;
19919 _ref11.dataFn;
19920 return function (event) {
19921 var request = event.target;
19922
19923 if (request.aborted) {
19924 return;
19925 }
19926
19927 segment.stats = videojs__default["default"].mergeOptions(segment.stats, getProgressStats(event)); // record the time that we receive the first byte of data
19928
19929 if (!segment.stats.firstBytesReceivedAt && segment.stats.bytesReceived) {
19930 segment.stats.firstBytesReceivedAt = Date.now();
19931 }
19932
19933 return progressFn(event, segment);
19934 };
19935 };
19936 /**
19937 * Loads all resources and does any processing necessary for a media-segment
19938 *
19939 * Features:
19940 * decrypts the media-segment if it has a key uri and an iv
19941 * aborts *all* requests if *any* one request fails
19942 *
19943 * The segment object, at minimum, has the following format:
19944 * {
19945 * resolvedUri: String,
19946 * [transmuxer]: Object,
19947 * [byterange]: {
19948 * offset: Number,
19949 * length: Number
19950 * },
19951 * [key]: {
19952 * resolvedUri: String
19953 * [byterange]: {
19954 * offset: Number,
19955 * length: Number
19956 * },
19957 * iv: {
19958 * bytes: Uint32Array
19959 * }
19960 * },
19961 * [map]: {
19962 * resolvedUri: String,
19963 * [byterange]: {
19964 * offset: Number,
19965 * length: Number
19966 * },
19967 * [bytes]: Uint8Array
19968 * }
19969 * }
19970 * ...where [name] denotes optional properties
19971 *
19972 * @param {Function} xhr - an instance of the xhr wrapper in xhr.js
19973 * @param {Object} xhrOptions - the base options to provide to all xhr requests
19974 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128
19975 * decryption routines
19976 * @param {Object} segment - a simplified copy of the segmentInfo object
19977 * from SegmentLoader
19978 * @param {Function} abortFn - a callback called (only once) if any piece of a request was
19979 * aborted
19980 * @param {Function} progressFn - a callback that receives progress events from the main
19981 * segment's xhr request
19982 * @param {Function} trackInfoFn - a callback that receives track info
19983 * @param {Function} timingInfoFn - a callback that receives timing info
19984 * @param {Function} videoSegmentTimingInfoFn
19985 * a callback that receives video timing info based on media times and
19986 * any adjustments made by the transmuxer
19987 * @param {Function} audioSegmentTimingInfoFn
19988 * a callback that receives audio timing info based on media times and
19989 * any adjustments made by the transmuxer
19990 * @param {Function} id3Fn - a callback that receives ID3 metadata
19991 * @param {Function} captionsFn - a callback that receives captions
19992 * @param {boolean} isEndOfTimeline
19993 * true if this segment represents the last segment in a timeline
19994 * @param {Function} endedTimelineFn
19995 * a callback made when a timeline is ended, will only be called if
19996 * isEndOfTimeline is true
19997 * @param {Function} dataFn - a callback that receives data from the main segment's xhr
19998 * request, transmuxed if needed
19999 * @param {Function} doneFn - a callback that is executed only once all requests have
20000 * succeeded or failed
20001 * @return {Function} a function that, when invoked, immediately aborts all
20002 * outstanding requests
20003 */
20004
20005
20006 var mediaSegmentRequest = function mediaSegmentRequest(_ref12) {
20007 var xhr = _ref12.xhr,
20008 xhrOptions = _ref12.xhrOptions,
20009 decryptionWorker = _ref12.decryptionWorker,
20010 segment = _ref12.segment,
20011 abortFn = _ref12.abortFn,
20012 progressFn = _ref12.progressFn,
20013 trackInfoFn = _ref12.trackInfoFn,
20014 timingInfoFn = _ref12.timingInfoFn,
20015 videoSegmentTimingInfoFn = _ref12.videoSegmentTimingInfoFn,
20016 audioSegmentTimingInfoFn = _ref12.audioSegmentTimingInfoFn,
20017 id3Fn = _ref12.id3Fn,
20018 captionsFn = _ref12.captionsFn,
20019 isEndOfTimeline = _ref12.isEndOfTimeline,
20020 endedTimelineFn = _ref12.endedTimelineFn,
20021 dataFn = _ref12.dataFn,
20022 doneFn = _ref12.doneFn,
20023 onTransmuxerLog = _ref12.onTransmuxerLog;
20024 var activeXhrs = [];
20025 var finishProcessingFn = waitForCompletion({
20026 activeXhrs: activeXhrs,
20027 decryptionWorker: decryptionWorker,
20028 trackInfoFn: trackInfoFn,
20029 timingInfoFn: timingInfoFn,
20030 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
20031 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
20032 id3Fn: id3Fn,
20033 captionsFn: captionsFn,
20034 isEndOfTimeline: isEndOfTimeline,
20035 endedTimelineFn: endedTimelineFn,
20036 dataFn: dataFn,
20037 doneFn: doneFn,
20038 onTransmuxerLog: onTransmuxerLog
20039 }); // optionally, request the decryption key
20040
20041 if (segment.key && !segment.key.bytes) {
20042 var objects = [segment.key];
20043
20044 if (segment.map && !segment.map.bytes && segment.map.key && segment.map.key.resolvedUri === segment.key.resolvedUri) {
20045 objects.push(segment.map.key);
20046 }
20047
20048 var keyRequestOptions = videojs__default["default"].mergeOptions(xhrOptions, {
20049 uri: segment.key.resolvedUri,
20050 responseType: 'arraybuffer'
20051 });
20052 var keyRequestCallback = handleKeyResponse(segment, objects, finishProcessingFn);
20053 var keyXhr = xhr(keyRequestOptions, keyRequestCallback);
20054 activeXhrs.push(keyXhr);
20055 } // optionally, request the associated media init segment
20056
20057
20058 if (segment.map && !segment.map.bytes) {
20059 var differentMapKey = segment.map.key && (!segment.key || segment.key.resolvedUri !== segment.map.key.resolvedUri);
20060
20061 if (differentMapKey) {
20062 var mapKeyRequestOptions = videojs__default["default"].mergeOptions(xhrOptions, {
20063 uri: segment.map.key.resolvedUri,
20064 responseType: 'arraybuffer'
20065 });
20066 var mapKeyRequestCallback = handleKeyResponse(segment, [segment.map.key], finishProcessingFn);
20067 var mapKeyXhr = xhr(mapKeyRequestOptions, mapKeyRequestCallback);
20068 activeXhrs.push(mapKeyXhr);
20069 }
20070
20071 var initSegmentOptions = videojs__default["default"].mergeOptions(xhrOptions, {
20072 uri: segment.map.resolvedUri,
20073 responseType: 'arraybuffer',
20074 headers: segmentXhrHeaders(segment.map)
20075 });
20076 var initSegmentRequestCallback = handleInitSegmentResponse({
20077 segment: segment,
20078 finishProcessingFn: finishProcessingFn
20079 });
20080 var initSegmentXhr = xhr(initSegmentOptions, initSegmentRequestCallback);
20081 activeXhrs.push(initSegmentXhr);
20082 }
20083
20084 var segmentRequestOptions = videojs__default["default"].mergeOptions(xhrOptions, {
20085 uri: segment.part && segment.part.resolvedUri || segment.resolvedUri,
20086 responseType: 'arraybuffer',
20087 headers: segmentXhrHeaders(segment)
20088 });
20089 var segmentRequestCallback = handleSegmentResponse({
20090 segment: segment,
20091 finishProcessingFn: finishProcessingFn,
20092 responseType: segmentRequestOptions.responseType
20093 });
20094 var segmentXhr = xhr(segmentRequestOptions, segmentRequestCallback);
20095 segmentXhr.addEventListener('progress', handleProgress({
20096 segment: segment,
20097 progressFn: progressFn,
20098 trackInfoFn: trackInfoFn,
20099 timingInfoFn: timingInfoFn,
20100 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
20101 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
20102 id3Fn: id3Fn,
20103 captionsFn: captionsFn,
20104 isEndOfTimeline: isEndOfTimeline,
20105 endedTimelineFn: endedTimelineFn,
20106 dataFn: dataFn
20107 }));
20108 activeXhrs.push(segmentXhr); // since all parts of the request must be considered, but callbacks should not be made
20109 // multiple times, provide a shared state object
20110
20111 var loadendState = {};
20112 activeXhrs.forEach(function (activeXhr) {
20113 activeXhr.addEventListener('loadend', handleLoadEnd({
20114 loadendState: loadendState,
20115 abortFn: abortFn
20116 }));
20117 });
20118 return function () {
20119 return abortAll(activeXhrs);
20120 };
20121 };
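  // A minimal calling sketch for `mediaSegmentRequest` (every value below is
  // an assumption; see the JSDoc above for the full callback contract):
  //
  //   var abort = mediaSegmentRequest({
  //     xhr: vhsXhr,                 // assumed instance of the xhr.js wrapper
  //     xhrOptions: { timeout: 45000 },
  //     decryptionWorker: decrypter, // assumed AES-128 decryption worker
  //     segment: simplifiedSegment,
  //     abortFn: function () {},
  //     progressFn: function (event, segment) {},
  //     trackInfoFn: function (segment, trackInfo) {},
  //     timingInfoFn: function (segment, mediaType, timeType, time) {},
  //     videoSegmentTimingInfoFn: function (timingInfo) {},
  //     audioSegmentTimingInfoFn: function (timingInfo) {},
  //     id3Fn: function (segment, frames, dispatchType) {},
  //     captionsFn: function (segment, captions) {},
  //     isEndOfTimeline: false,
  //     endedTimelineFn: function () {},
  //     dataFn: function (segment, result) {},
  //     doneFn: function (error, segment, result) {},
  //     onTransmuxerLog: function (log) {}
  //   });
  //   // calling abort() immediately cancels all outstanding requests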
20122
20123 /**
20124 * @file - codecs.js - Handles tasks regarding codec strings, such as translating codec
20125 * objects to codec strings, or translating codec strings into objects that can be examined.
20126 */
20127 var logFn$1 = logger('CodecUtils');
20128 /**
20129 * Returns a set of codec strings parsed from the playlist, or undefined if
20130 * no codecs were specified in the playlist
20131 *
20132 * @param {Playlist} media the current media playlist
20133 * @return {Object} an object with the video and audio codecs
20134 */
20135
20136 var getCodecs = function getCodecs(media) {
20137 // if the codecs were explicitly specified, use them instead of the
20138 // defaults
20139 var mediaAttributes = media.attributes || {};
20140
20141 if (mediaAttributes.CODECS) {
20142 return parseCodecs(mediaAttributes.CODECS);
20143 }
20144 };
20145
20146 var isMaat = function isMaat(master, media) {
20147 var mediaAttributes = media.attributes || {};
20148 return master && master.mediaGroups && master.mediaGroups.AUDIO && mediaAttributes.AUDIO && master.mediaGroups.AUDIO[mediaAttributes.AUDIO];
20149 };
20150 var isMuxed = function isMuxed(master, media) {
20151 if (!isMaat(master, media)) {
20152 return true;
20153 }
20154
20155 var mediaAttributes = media.attributes || {};
20156 var audioGroup = master.mediaGroups.AUDIO[mediaAttributes.AUDIO];
20157
20158 for (var groupId in audioGroup) {
20159 // If an audio group has a URI (the case for HLS, as HLS will use external playlists),
20160 // or there are listed playlists (the case for DASH, as the manifest will have already
20161 // provided all of the details necessary to generate the audio playlist, as opposed to
20162 // HLS' externally requested playlists), then the content is demuxed.
20163 if (!audioGroup[groupId].uri && !audioGroup[groupId].playlists) {
20164 return true;
20165 }
20166 }
20167
20168 return false;
20169 };
20170 var unwrapCodecList = function unwrapCodecList(codecList) {
20171 var codecs = {};
20172 codecList.forEach(function (_ref) {
20173 var mediaType = _ref.mediaType,
20174 type = _ref.type,
20175 details = _ref.details;
20176 codecs[mediaType] = codecs[mediaType] || [];
20177 codecs[mediaType].push(translateLegacyCodec("" + type + details));
20178 });
20179 Object.keys(codecs).forEach(function (mediaType) {
20180 if (codecs[mediaType].length > 1) {
20181 logFn$1("multiple " + mediaType + " codecs found as attributes: " + codecs[mediaType].join(', ') + ". Setting playlist codecs to null so that we wait for mux.js to probe segments for real codecs.");
20182 codecs[mediaType] = null;
20183 return;
20184 }
20185
20186 codecs[mediaType] = codecs[mediaType][0];
20187 });
20188 return codecs;
20189 };
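  // For example (codec strings are illustrative), the parsed codec list for
  // CODECS="avc1.4d400d,mp4a.40.2" unwraps to
  // { video: 'avc1.4d400d', audio: 'mp4a.40.2' }, while two audio codec
  // attributes would log a warning and set `audio` to null so that mux.js
  // probes segments for the real codec.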
20190 var codecCount = function codecCount(codecObj) {
20191 var count = 0;
20192
20193 if (codecObj.audio) {
20194 count++;
20195 }
20196
20197 if (codecObj.video) {
20198 count++;
20199 }
20200
20201 return count;
20202 };
20203 /**
20204 * Calculates the codec strings for a working configuration of
20205 * SourceBuffers to play variant streams in a master playlist. If
20206 * there is no possible working configuration, an empty object will be
20207 * returned.
20208 *
20209 * @param {Object} master the m3u8 object for the master playlist
20210 * @param {Object} media the m3u8 object for the variant playlist
20211 * @return {Object} the codec strings.
20212 *
20213 * @private
20214 */
20215
20216 var codecsForPlaylist = function codecsForPlaylist(master, media) {
20217 var mediaAttributes = media.attributes || {};
20218 var codecInfo = unwrapCodecList(getCodecs(media) || []); // HLS with multiple-audio tracks must always get an audio codec.
20219 // Put another way, there is no way to have a video-only multiple-audio HLS!
20220
20221 if (isMaat(master, media) && !codecInfo.audio) {
20222 if (!isMuxed(master, media)) {
20223 // It is possible for codecs to be specified on the audio media group playlist but
20224 // not on the rendition playlist. This is mostly the case for DASH, where audio and
20225 // video are always separate (and separately specified).
20226 var defaultCodecs = unwrapCodecList(codecsFromDefault(master, mediaAttributes.AUDIO) || []);
20227
20228 if (defaultCodecs.audio) {
20229 codecInfo.audio = defaultCodecs.audio;
20230 }
20231 }
20232 }
20233
20234 return codecInfo;
20235 };
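  // Sketch of the fallback above (values are illustrative): a DASH rendition
  // with CODECS="avc1.640028" whose audio lives in a separate, demuxed audio
  // group would return { video: 'avc1.640028', audio: 'mp4a.40.2' }, with the
  // audio codec pulled from the group's default playlist.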
20236
20237 var logFn = logger('PlaylistSelector');
20238
20239 var representationToString = function representationToString(representation) {
20240 if (!representation || !representation.playlist) {
20241 return;
20242 }
20243
20244 var playlist = representation.playlist;
20245 return JSON.stringify({
20246 id: playlist.id,
20247 bandwidth: representation.bandwidth,
20248 width: representation.width,
20249 height: representation.height,
20250 codecs: playlist.attributes && playlist.attributes.CODECS || ''
20251 });
20252 }; // Utilities
20253
20254 /**
20255 * Returns the CSS value for the specified property on an element
20256 * using `getComputedStyle`. Firefox has a long-standing issue where
20257 * getComputedStyle() may return null when running in an iframe with
20258 * `display: none`.
20259 *
20260 * @see https://bugzilla.mozilla.org/show_bug.cgi?id=548397
20261 * @param {HTMLElement} el the HTMLElement to work on
20262 * @param {string} property the property to get the style for
20263 */
20264
20265
20266 var safeGetComputedStyle = function safeGetComputedStyle(el, property) {
20267 if (!el) {
20268 return '';
20269 }
20270
20271 var result = window.getComputedStyle(el);
20272
20273 if (!result) {
20274 return '';
20275 }
20276
20277 return result[property];
20278 };
20279 /**
20280 * Reusable stable sort function
20281 *
20282 * @param {Playlists} array
20283 * @param {Function} sortFn Different comparators
20284 * @function stableSort
20285 */
20286
20287
20288 var stableSort = function stableSort(array, sortFn) {
20289 var newArray = array.slice();
20290 array.sort(function (left, right) {
20291 var cmp = sortFn(left, right);
20292
20293 if (cmp === 0) {
20294 return newArray.indexOf(left) - newArray.indexOf(right);
20295 }
20296
20297 return cmp;
20298 });
20299 };
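  // Stability sketch (hypothetical renditions): entries that compare equal
  // keep their original manifest order.
  //
  //   var reps = [{ id: 'a', bandwidth: 5e6 }, { id: 'b', bandwidth: 5e6 }];
  //   stableSort(reps, function (l, r) { return l.bandwidth - r.bandwidth; });
  //   // reps[0].id is still 'a'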
20300 /**
20301 * A comparator function to sort two playlist object by bandwidth.
20302 *
20303 * @param {Object} left a media playlist object
20304 * @param {Object} right a media playlist object
20305 * @return {number} Greater than zero if the bandwidth attribute of
20306 * left is greater than the corresponding attribute of right. Less
20307 * than zero if the bandwidth of right is greater than left and
20308 * exactly zero if the two are equal.
20309 */
20310
20311
20312 var comparePlaylistBandwidth = function comparePlaylistBandwidth(left, right) {
20313 var leftBandwidth;
20314 var rightBandwidth;
20315
20316 if (left.attributes.BANDWIDTH) {
20317 leftBandwidth = left.attributes.BANDWIDTH;
20318 }
20319
20320 leftBandwidth = leftBandwidth || window.Number.MAX_VALUE;
20321
20322 if (right.attributes.BANDWIDTH) {
20323 rightBandwidth = right.attributes.BANDWIDTH;
20324 }
20325
20326 rightBandwidth = rightBandwidth || window.Number.MAX_VALUE;
20327 return leftBandwidth - rightBandwidth;
20328 };
20329 /**
20330 * A comparator function to sort two playlist object by resolution (width).
20331 *
20332 * @param {Object} left a media playlist object
20333 * @param {Object} right a media playlist object
20334 * @return {number} Greater than zero if the resolution.width attribute of
20335 * left is greater than the corresponding attribute of right. Less
20336 * than zero if the resolution.width of right is greater than left and
20337 * exactly zero if the two are equal.
20338 */
20339
20340 var comparePlaylistResolution = function comparePlaylistResolution(left, right) {
20341 var leftWidth;
20342 var rightWidth;
20343
20344 if (left.attributes.RESOLUTION && left.attributes.RESOLUTION.width) {
20345 leftWidth = left.attributes.RESOLUTION.width;
20346 }
20347
20348 leftWidth = leftWidth || window.Number.MAX_VALUE;
20349
20350 if (right.attributes.RESOLUTION && right.attributes.RESOLUTION.width) {
20351 rightWidth = right.attributes.RESOLUTION.width;
20352 }
20353
20354 rightWidth = rightWidth || window.Number.MAX_VALUE; // NOTE - Fallback to bandwidth sort as appropriate in cases where multiple renditions
20355 // have the same media dimensions/resolution
20356
20357 if (leftWidth === rightWidth && left.attributes.BANDWIDTH && right.attributes.BANDWIDTH) {
20358 return left.attributes.BANDWIDTH - right.attributes.BANDWIDTH;
20359 }
20360
20361 return leftWidth - rightWidth;
20362 };
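  // Either comparator above can be fed to `stableSort`; playlists missing the
  // relevant attribute sort last because they default to Number.MAX_VALUE
  // (hypothetical usage):
  //
  //   stableSort(master.playlists, comparePlaylistBandwidth);
  //   stableSort(master.playlists, comparePlaylistResolution);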
20363 /**
20364 * Chooses the appropriate media playlist based on bandwidth and player size
20365 *
20366 * @param {Object} master
20367 * Object representation of the master manifest
20368 * @param {number} playerBandwidth
20369 * Current calculated bandwidth of the player
20370 * @param {number} playerWidth
20371 * Current width of the player element (should account for the device pixel ratio)
20372 * @param {number} playerHeight
20373 * Current height of the player element (should account for the device pixel ratio)
20374 * @param {boolean} limitRenditionByPlayerDimensions
20375 * True if the player width and height should be used during the selection, false otherwise
20376 * @param {Object} masterPlaylistController
20377 * the current masterPlaylistController object
20378 * @return {Playlist} the highest bitrate playlist less than the
20379 * currently detected bandwidth, accounting for some amount of
20380 * bandwidth variance
20381 */
20382
20383 var simpleSelector = function simpleSelector(master, playerBandwidth, playerWidth, playerHeight, limitRenditionByPlayerDimensions, masterPlaylistController) {
20384 // If we end up getting called before `master` is available, exit early
20385 if (!master) {
20386 return;
20387 }
20388
20389 var options = {
20390 bandwidth: playerBandwidth,
20391 width: playerWidth,
20392 height: playerHeight,
20393 limitRenditionByPlayerDimensions: limitRenditionByPlayerDimensions
20394 };
20395 var playlists = master.playlists; // if playlist is audio only, select between currently active audio group playlists.
20396
20397 if (Playlist.isAudioOnly(master)) {
20398 playlists = masterPlaylistController.getAudioTrackPlaylists_(); // add audioOnly to options so that we log audioOnly: true
20399 // at the bottom of this function for debugging.
20400
20401 options.audioOnly = true;
20402 } // convert the playlists to an intermediary representation to make comparisons easier
20403
20404
20405 var sortedPlaylistReps = playlists.map(function (playlist) {
20406 var bandwidth;
20407 var width = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.width;
20408 var height = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height;
20409 bandwidth = playlist.attributes && playlist.attributes.BANDWIDTH;
20410 bandwidth = bandwidth || window.Number.MAX_VALUE;
20411 return {
20412 bandwidth: bandwidth,
20413 width: width,
20414 height: height,
20415 playlist: playlist
20416 };
20417 });
20418 stableSort(sortedPlaylistReps, function (left, right) {
20419 return left.bandwidth - right.bandwidth;
20420 }); // filter out any playlists that have been excluded due to
20421 // incompatible configurations
20422
20423 sortedPlaylistReps = sortedPlaylistReps.filter(function (rep) {
20424 return !Playlist.isIncompatible(rep.playlist);
20425 }); // filter out any playlists that have been disabled manually through the representations
20426 // api or blacklisted temporarily due to playback errors.
20427
20428 var enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
20429 return Playlist.isEnabled(rep.playlist);
20430 });
20431
20432 if (!enabledPlaylistReps.length) {
20433 // if there are no enabled playlists, then they have all been blacklisted or disabled
20434 // by the user through the representations api. In this case, ignore blacklisting and
20435 // fallback to what the user wants by using playlists the user has not disabled.
20436 enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
20437 return !Playlist.isDisabled(rep.playlist);
20438 });
20439 } // filter out any variant that has greater effective bitrate
20440 // than the current estimated bandwidth
20441
20442
20443 var bandwidthPlaylistReps = enabledPlaylistReps.filter(function (rep) {
20444 return rep.bandwidth * Config.BANDWIDTH_VARIANCE < playerBandwidth;
20445 });
20446 var highestRemainingBandwidthRep = bandwidthPlaylistReps[bandwidthPlaylistReps.length - 1]; // get all of the renditions with the same (highest) bandwidth
20447 // and then take the very first element
20448
20449 var bandwidthBestRep = bandwidthPlaylistReps.filter(function (rep) {
20450 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
20451 })[0]; // if we're not going to limit renditions by player size, make an early decision.
20452
20453 if (limitRenditionByPlayerDimensions === false) {
20454 var _chosenRep = bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];
20455
20456 if (_chosenRep && _chosenRep.playlist) {
20457 var type = 'sortedPlaylistReps';
20458
20459 if (bandwidthBestRep) {
20460 type = 'bandwidthBestRep';
20461 }
20462
20463 if (enabledPlaylistReps[0]) {
20464 type = 'enabledPlaylistReps';
20465 }
20466
20467 logFn("choosing " + representationToString(_chosenRep) + " using " + type + " with options", options);
20468 return _chosenRep.playlist;
20469 }
20470
20471 logFn('could not choose a playlist with options', options);
20472 return null;
20473 } // filter out playlists without resolution information
20474
20475
20476 var haveResolution = bandwidthPlaylistReps.filter(function (rep) {
20477 return rep.width && rep.height;
20478 }); // sort variants by resolution
20479
20480 stableSort(haveResolution, function (left, right) {
20481 return left.width - right.width;
20482 }); // if we have the exact resolution as the player, use it
20483
20484 var resolutionBestRepList = haveResolution.filter(function (rep) {
20485 return rep.width === playerWidth && rep.height === playerHeight;
20486 });
20487 highestRemainingBandwidthRep = resolutionBestRepList[resolutionBestRepList.length - 1]; // ensure that we pick the highest bandwidth variant that has the exact resolution
20488
20489 var resolutionBestRep = resolutionBestRepList.filter(function (rep) {
20490 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
20491 })[0];
20492 var resolutionPlusOneList;
20493 var resolutionPlusOneSmallest;
20494 var resolutionPlusOneRep; // find the smallest variant that is larger than the player
20495 // if there is no match of exact resolution
20496
20497 if (!resolutionBestRep) {
20498 resolutionPlusOneList = haveResolution.filter(function (rep) {
20499 return rep.width > playerWidth || rep.height > playerHeight;
20500 }); // find all the variants that have the same smallest resolution
20501
20502 resolutionPlusOneSmallest = resolutionPlusOneList.filter(function (rep) {
20503 return rep.width === resolutionPlusOneList[0].width && rep.height === resolutionPlusOneList[0].height;
20504 }); // ensure that we also pick the highest bandwidth variant that
20505 // is just-larger-than the video player
20506
20507 highestRemainingBandwidthRep = resolutionPlusOneSmallest[resolutionPlusOneSmallest.length - 1];
20508 resolutionPlusOneRep = resolutionPlusOneSmallest.filter(function (rep) {
20509 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
20510 })[0];
20511 }
20512
20513 var leastPixelDiffRep; // If this selector proves to be better than others,
20514 // resolutionPlusOneRep and resolutionBestRep and all
20515 // the code involving them should be removed.
20516
20517 if (masterPlaylistController.experimentalLeastPixelDiffSelector) {
20518 // find the variant that is closest to the player's pixel size
20519 var leastPixelDiffList = haveResolution.map(function (rep) {
20520 rep.pixelDiff = Math.abs(rep.width - playerWidth) + Math.abs(rep.height - playerHeight);
20521 return rep;
20522 }); // get the highest bandwidth, closest resolution playlist
20523
20524 stableSort(leastPixelDiffList, function (left, right) {
20525 // sort by highest bandwidth if pixelDiff is the same
20526 if (left.pixelDiff === right.pixelDiff) {
20527 return right.bandwidth - left.bandwidth;
20528 }
20529
20530 return left.pixelDiff - right.pixelDiff;
20531 });
20532 leastPixelDiffRep = leastPixelDiffList[0];
20533 } // fallback chain of variants
20534
20535
20536 var chosenRep = leastPixelDiffRep || resolutionPlusOneRep || resolutionBestRep || bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];
20537
20538 if (chosenRep && chosenRep.playlist) {
20539 var _type = 'sortedPlaylistReps';
20540
20541 if (leastPixelDiffRep) {
20542 _type = 'leastPixelDiffRep';
20543 } else if (resolutionPlusOneRep) {
20544 _type = 'resolutionPlusOneRep';
20545 } else if (resolutionBestRep) {
20546 _type = 'resolutionBestRep';
20547 } else if (bandwidthBestRep) {
20548 _type = 'bandwidthBestRep';
20549 } else if (enabledPlaylistReps[0]) {
20550 _type = 'enabledPlaylistReps';
20551 }
20552
20553 logFn("choosing " + representationToString(chosenRep) + " using " + _type + " with options", options);
20554 return chosenRep.playlist;
20555 }
20556
20557 logFn('could not choose a playlist with options', options);
20558 return null;
20559 };
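// --- Illustrative sketch (not part of the library) ---
// The selector above resolves a rendition through a fallback chain:
// leastPixelDiffRep || resolutionPlusOneRep || resolutionBestRep ||
// bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0].
// A minimal stand-alone version of its bandwidth filter step, for
// experimentation (the rep shape and the variance value of 1.2 used in the
// comment below are assumptions, not documented defaults):

var exampleFilterByBandwidth = function (reps, measuredBandwidth, variance) {
  // keep only renditions whose effective bitrate fits under the estimate
  return reps.filter(function (rep) {
    return rep.bandwidth * variance < measuredBandwidth;
  });
};
// e.g. with variance 1.2 and a 5 Mbps estimate, a 4.5 Mbps rendition is
// dropped (4.5e6 * 1.2 >= 5e6) while a 4 Mbps rendition is kept.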
20560
20561 /**
20562 * Chooses the appropriate media playlist based on the most recent
20563 * bandwidth estimate and the player size.
20564 *
20565 * Expects to be called within the context of an instance of VhsHandler
20566 *
20567 * @return {Playlist} the highest bitrate playlist less than the
20568 * currently detected bandwidth, accounting for some amount of
20569 * bandwidth variance
20570 */
20571
20572 var lastBandwidthSelector = function lastBandwidthSelector() {
20573 var pixelRatio = this.useDevicePixelRatio ? window.devicePixelRatio || 1 : 1;
20574 return simpleSelector(this.playlists.master, this.systemBandwidth, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions, this.masterPlaylistController_);
20575 };
20576 /**
20577 * Chooses the appropriate media playlist based on an
20578 * exponential-weighted moving average of the bandwidth after
20579 * filtering for player size.
20580 *
20581 * Expects to be called within the context of an instance of VhsHandler
20582 *
20583 * @param {number} decay - a number between 0 and 1. Higher values of
20584 * this parameter will cause previous bandwidth estimates to lose
20585 * significance more quickly.
20586 * @return {Function} a function which can be invoked to create a new
20587 * playlist selector function.
20588 * @see https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
20589 */
20590
20591 var movingAverageBandwidthSelector = function movingAverageBandwidthSelector(decay) {
20592 var average = -1;
20593 var lastSystemBandwidth = -1;
20594
20595 if (decay < 0 || decay > 1) {
20596 throw new Error('Moving average bandwidth decay must be between 0 and 1.');
20597 }
20598
20599 return function () {
20600 var pixelRatio = this.useDevicePixelRatio ? window.devicePixelRatio || 1 : 1;
20601
20602 if (average < 0) {
20603 average = this.systemBandwidth;
20604 lastSystemBandwidth = this.systemBandwidth;
20605 } // stop the average value from decaying for every 250ms
20606 // when the systemBandwidth is constant
20607 // and
20608 // stop average from setting to a very low value when the
20609 // systemBandwidth becomes 0 in case of chunk cancellation
20610
20611
20612 if (this.systemBandwidth > 0 && this.systemBandwidth !== lastSystemBandwidth) {
20613 average = decay * this.systemBandwidth + (1 - decay) * average;
20614 lastSystemBandwidth = this.systemBandwidth;
20615 }
20616
20617 return simpleSelector(this.playlists.master, average, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions, this.masterPlaylistController_);
20618 };
20619 };
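// --- Illustrative sketch (not part of the library) ---
// The update above is a standard exponentially-weighted moving average:
// average = decay * sample + (1 - decay) * average. A stand-alone version:

var exampleEwma = function (decay, sample, average) {
  return decay * sample + (1 - decay) * average;
};
// e.g. exampleEwma(0.5, 8e6, 4e6) === 6e6; a higher decay makes the
// estimate chase new bandwidth samples more aggressively.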
20620 /**
20621 * Chooses the appropriate media playlist based on the potential to rebuffer
20622 *
20623 * @param {Object} settings
20624 * Object of information required to use this selector
20625 * @param {Object} settings.master
20626 * Object representation of the master manifest
20627 * @param {number} settings.currentTime
20628 * The current time of the player
20629 * @param {number} settings.bandwidth
20630 * Current measured bandwidth
20631 * @param {number} settings.duration
20632 * Duration of the media
20633 * @param {number} settings.segmentDuration
20634 * Segment duration to be used in round trip time calculations
20635 * @param {number} settings.timeUntilRebuffer
20636 * Time left in seconds until the player has to rebuffer
20637 * @param {number} settings.currentTimeline
20638 * The current timeline segments are being loaded from
20639 * @param {SyncController} settings.syncController
20640 * SyncController for determining if we have a sync point for a given playlist
20641 * @return {Object|null}
20642 * {Object} return.playlist
20643 * The highest bandwidth playlist with the least amount of rebuffering
20644 * {Number} return.rebufferingImpact
20645 * The amount of time in seconds switching to this playlist will rebuffer. A
20646 * negative value means that switching will cause zero rebuffering.
20647 */
20648
20649 var minRebufferMaxBandwidthSelector = function minRebufferMaxBandwidthSelector(settings) {
20650 var master = settings.master,
20651 currentTime = settings.currentTime,
20652 bandwidth = settings.bandwidth,
20653 duration = settings.duration,
20654 segmentDuration = settings.segmentDuration,
20655 timeUntilRebuffer = settings.timeUntilRebuffer,
20656 currentTimeline = settings.currentTimeline,
20657 syncController = settings.syncController; // filter out any playlists that have been excluded due to
20658 // incompatible configurations
20659
20660 var compatiblePlaylists = master.playlists.filter(function (playlist) {
20661 return !Playlist.isIncompatible(playlist);
20662 }); // filter out any playlists that have been disabled manually through the representations
20663 // api or blacklisted temporarily due to playback errors.
20664
20665 var enabledPlaylists = compatiblePlaylists.filter(Playlist.isEnabled);
20666
20667 if (!enabledPlaylists.length) {
20668 // if there are no enabled playlists, then they have all been blacklisted or disabled
20669 // by the user through the representations api. In this case, ignore blacklisting and
20670 // fall back to what the user wants by using playlists the user has not disabled.
20671 enabledPlaylists = compatiblePlaylists.filter(function (playlist) {
20672 return !Playlist.isDisabled(playlist);
20673 });
20674 }
20675
20676 var bandwidthPlaylists = enabledPlaylists.filter(Playlist.hasAttribute.bind(null, 'BANDWIDTH'));
20677 var rebufferingEstimates = bandwidthPlaylists.map(function (playlist) {
20678 var syncPoint = syncController.getSyncPoint(playlist, duration, currentTimeline, currentTime); // If there is no sync point for this playlist, switching to it will require a
20679 // sync request first. This will double the request time
20680
20681 var numRequests = syncPoint ? 1 : 2;
20682 var requestTimeEstimate = Playlist.estimateSegmentRequestTime(segmentDuration, bandwidth, playlist);
20683 var rebufferingImpact = requestTimeEstimate * numRequests - timeUntilRebuffer;
20684 return {
20685 playlist: playlist,
20686 rebufferingImpact: rebufferingImpact
20687 };
20688 });
20689 var noRebufferingPlaylists = rebufferingEstimates.filter(function (estimate) {
20690 return estimate.rebufferingImpact <= 0;
20691 }); // Sort by bandwidth DESC
20692
20693 stableSort(noRebufferingPlaylists, function (a, b) {
20694 return comparePlaylistBandwidth(b.playlist, a.playlist);
20695 });
20696
20697 if (noRebufferingPlaylists.length) {
20698 return noRebufferingPlaylists[0];
20699 }
20700
20701 stableSort(rebufferingEstimates, function (a, b) {
20702 return a.rebufferingImpact - b.rebufferingImpact;
20703 });
20704 return rebufferingEstimates[0] || null;
20705 };
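// --- Illustrative sketch (not part of the library) ---
// The core trade-off above is rebufferingImpact = requestTimeEstimate *
// numRequests - timeUntilRebuffer. Assuming the request-time estimate is
// segmentDuration * playlistBitrate / measuredBandwidth (roughly what
// Playlist.estimateSegmentRequestTime appears to compute), a stand-alone
// version:

var exampleRebufferingImpact = function (segmentDuration, playlistBitrate, measuredBandwidth, numRequests, timeUntilRebuffer) {
  var requestTimeEstimate = segmentDuration * playlistBitrate / measuredBandwidth;
  return requestTimeEstimate * numRequests - timeUntilRebuffer;
};
// e.g. exampleRebufferingImpact(4, 4e6, 1e6, 2, 10) === 22: switching
// would stall for ~22s, while a result <= 0 means the switch is "free".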
20706 /**
20707 * Chooses the appropriate media playlist, which in this case is the lowest bitrate
20708 * one with video. If no renditions with video exist, return the lowest audio rendition.
20709 *
20710 * Expects to be called within the context of an instance of VhsHandler
20711 *
20712 * @return {Object|null}
20713 * {Object} return.playlist
20714 * The lowest bitrate playlist that contains a video codec. If no such rendition
20715 * exists pick the lowest audio rendition.
20716 */
20717
20718 var lowestBitrateCompatibleVariantSelector = function lowestBitrateCompatibleVariantSelector() {
20719 var _this = this;
20720
20721 // filter out any playlists that have been excluded due to
20722 // incompatible configurations or playback errors
20723 var playlists = this.playlists.master.playlists.filter(Playlist.isEnabled); // Sort ascending by bitrate
20724
20725 stableSort(playlists, function (a, b) {
20726 return comparePlaylistBandwidth(a, b);
20727 }); // Parse and assume that playlists with no video codec have no video
20728 // (this is not necessarily true, although it is generally true).
20729 //
20730 // If an entire manifest has no valid videos everything will get filtered
20731 // out.
20732
20733 var playlistsWithVideo = playlists.filter(function (playlist) {
20734 return !!codecsForPlaylist(_this.playlists.master, playlist).video;
20735 });
20736 return playlistsWithVideo[0] || null;
20737 };
20738
20739 /**
20740 * Combine all segments into a single Uint8Array
20741 *
20742 * @param {Object} segmentObj
20743 * @return {Uint8Array} concatenated bytes
20744 * @private
20745 */
20746 var concatSegments = function concatSegments(segmentObj) {
20747 var offset = 0;
20748 var tempBuffer;
20749
20750 if (segmentObj.bytes) {
20751 tempBuffer = new Uint8Array(segmentObj.bytes); // combine the individual segments into one large typed-array
20752
20753 segmentObj.segments.forEach(function (segment) {
20754 tempBuffer.set(segment, offset);
20755 offset += segment.byteLength;
20756 });
20757 }
20758
20759 return tempBuffer;
20760 };
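// --- Illustrative sketch (not part of the library) ---
// The same concatenation written stand-alone: allocate one buffer of the
// total byteLength, then copy each chunk in at a running offset.

var exampleConcatBytes = function (chunks) {
  var total = chunks.reduce(function (sum, chunk) {
    return sum + chunk.byteLength;
  }, 0);
  var out = new Uint8Array(total);
  var offset = 0;
  chunks.forEach(function (chunk) {
    out.set(chunk, offset);
    offset += chunk.byteLength;
  });
  return out;
};
// e.g. exampleConcatBytes([new Uint8Array([1, 2]), new Uint8Array([3])])
// yields Uint8Array [1, 2, 3].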
20761
20762 /**
20763 * @file text-tracks.js
20764 */
20765 /**
20766 * Create captions text tracks on video.js if they do not exist
20767 *
20768 * @param {Object} inbandTextTracks a reference to current inbandTextTracks
20769 * @param {Object} tech the video.js tech
20770 * @param {Object} captionStream the caption stream to create
20771 * @private
20772 */
20773
20774 var createCaptionsTrackIfNotExists = function createCaptionsTrackIfNotExists(inbandTextTracks, tech, captionStream) {
20775 if (!inbandTextTracks[captionStream]) {
20776 tech.trigger({
20777 type: 'usage',
20778 name: 'vhs-608'
20779 });
20780 tech.trigger({
20781 type: 'usage',
20782 name: 'hls-608'
20783 });
20784 var instreamId = captionStream; // we need to translate SERVICEn for 708 to how mux.js currently labels them
20785
20786 if (/^cc708_/.test(captionStream)) {
20787 instreamId = 'SERVICE' + captionStream.split('_')[1];
20788 }
20789
20790 var track = tech.textTracks().getTrackById(instreamId);
20791
20792 if (track) {
20793 // Reuse an existing track with a CC# id because this was
20794 // very likely created by videojs-contrib-hls from information
20795 // in the m3u8 for us to use
20796 inbandTextTracks[captionStream] = track;
20797 } else {
20798 // This section gets called when we have caption services that aren't specified in the manifest.
20799 // Manifest level caption services are handled in media-groups.js under CLOSED-CAPTIONS.
20800 var captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};
20801 var label = captionStream;
20802 var language = captionStream;
20803 var def = false;
20804 var captionService = captionServices[instreamId];
20805
20806 if (captionService) {
20807 label = captionService.label;
20808 language = captionService.language;
20809 def = captionService.default;
20810 } // Otherwise, create a track with the default `CC#` label and
20811 // without a language
20812
20813
20814 inbandTextTracks[captionStream] = tech.addRemoteTextTrack({
20815 kind: 'captions',
20816 id: instreamId,
20817 // TODO: investigate why this doesn't seem to turn the caption on by default
20818 default: def,
20819 label: label,
20820 language: language
20821 }, false).track;
20822 }
20823 }
20824 };
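// --- Illustrative sketch (not part of the library) ---
// The captionServices lookup above reads from the tech's vhs options. A
// player configured along these lines (the shape is inferred from the code
// above, so treat it as an assumption rather than documentation) would give
// CC1 and SERVICE1 friendly labels:
//
// videojs('my-player', {
//   html5: {
//     vhs: {
//       captionServices: {
//         CC1: { label: 'English', language: 'en', default: true },
//         SERVICE1: { label: 'Spanish', language: 'es' }
//       }
//     }
//   }
// });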
20825 /**
20826 * Add caption text track data to a source handler given an array of captions
20827 *
20828 * @param {Object} options
20829 * @param {Object} inbandTextTracks the inband text tracks
20830 * @param {number} timestampOffset the timestamp offset of the source buffer
20831 * @param {Array} captionArray an array of caption data
20832 * @private
20833 */
20834
20835 var addCaptionData = function addCaptionData(_ref) {
20836 var inbandTextTracks = _ref.inbandTextTracks,
20837 captionArray = _ref.captionArray,
20838 timestampOffset = _ref.timestampOffset;
20839
20840 if (!captionArray) {
20841 return;
20842 }
20843
20844 var Cue = window.WebKitDataCue || window.VTTCue;
20845 captionArray.forEach(function (caption) {
20846 var track = caption.stream;
20847 inbandTextTracks[track].addCue(new Cue(caption.startTime + timestampOffset, caption.endTime + timestampOffset, caption.text));
20848 });
20849 };
20850 /**
20851 * Define properties on a cue for backwards compatibility,
20852 * but warn the user that the way that they are using it
20853 * is deprecated and will be removed at a later date.
20854 *
20855 * @param {Cue} cue the cue to add the properties on
20856 * @private
20857 */
20858
20859 var deprecateOldCue = function deprecateOldCue(cue) {
20860 Object.defineProperties(cue.frame, {
20861 id: {
20862 get: function get() {
20863 videojs__default["default"].log.warn('cue.frame.id is deprecated. Use cue.value.key instead.');
20864 return cue.value.key;
20865 }
20866 },
20867 value: {
20868 get: function get() {
20869 videojs__default["default"].log.warn('cue.frame.value is deprecated. Use cue.value.data instead.');
20870 return cue.value.data;
20871 }
20872 },
20873 privateData: {
20874 get: function get() {
20875 videojs__default["default"].log.warn('cue.frame.privateData is deprecated. Use cue.value.data instead.');
20876 return cue.value.data;
20877 }
20878 }
20879 });
20880 };
20881 /**
20882 * Add metadata text track data to a source handler given an array of metadata
20883 *
20884 * @param {Object} options
20885 * @param {Object} inbandTextTracks the inband text tracks
20886 * @param {Array} metadataArray an array of meta data
20887 * @param {number} timestampOffset the timestamp offset of the source buffer
20888 * @param {number} videoDuration the duration of the video
20889 * @private
20890 */
20891
20892
20893 var addMetadata = function addMetadata(_ref2) {
20894 var inbandTextTracks = _ref2.inbandTextTracks,
20895 metadataArray = _ref2.metadataArray,
20896 timestampOffset = _ref2.timestampOffset,
20897 videoDuration = _ref2.videoDuration;
20898
20899 if (!metadataArray) {
20900 return;
20901 }
20902
20903 var Cue = window.WebKitDataCue || window.VTTCue;
20904 var metadataTrack = inbandTextTracks.metadataTrack_;
20905
20906 if (!metadataTrack) {
20907 return;
20908 }
20909
20910 metadataArray.forEach(function (metadata) {
20911 var time = metadata.cueTime + timestampOffset; // if time isn't a finite number between 0 and Infinity, like NaN,
20912 // ignore this bit of metadata.
20913 // This likely occurs when you have a non-timed ID3 tag like TIT2,
20914 // which is the "Title/Songname/Content description" frame
20915
20916 if (typeof time !== 'number' || window.isNaN(time) || time < 0 || !(time < Infinity)) {
20917 return;
20918 }
20919
20920 metadata.frames.forEach(function (frame) {
20921 var cue = new Cue(time, time, frame.value || frame.url || frame.data || '');
20922 cue.frame = frame;
20923 cue.value = frame;
20924 deprecateOldCue(cue);
20925 metadataTrack.addCue(cue);
20926 });
20927 });
20928
20929 if (!metadataTrack.cues || !metadataTrack.cues.length) {
20930 return;
20931 } // Update the metadata cues so that
20932 // the endTime of each cue is the startTime of the next cue
20933 // the endTime of last cue is the duration of the video
20934
20935
20936 var cues = metadataTrack.cues;
20937 var cuesArray = []; // Create a copy of the TextTrackCueList...
20938 // ...disregarding cues with a falsy value
20939
20940 for (var i = 0; i < cues.length; i++) {
20941 if (cues[i]) {
20942 cuesArray.push(cues[i]);
20943 }
20944 } // Group cues by their startTime value
20945
20946
20947 var cuesGroupedByStartTime = cuesArray.reduce(function (obj, cue) {
20948 var timeSlot = obj[cue.startTime] || [];
20949 timeSlot.push(cue);
20950 obj[cue.startTime] = timeSlot;
20951 return obj;
20952 }, {}); // Sort startTimes by ascending order
20953
20954 var sortedStartTimes = Object.keys(cuesGroupedByStartTime).sort(function (a, b) {
20955 return Number(a) - Number(b);
20956 }); // Map each cue group's endTime to the next group's startTime
20957
20958 sortedStartTimes.forEach(function (startTime, idx) {
20959 var cueGroup = cuesGroupedByStartTime[startTime];
20960 var nextTime = Number(sortedStartTimes[idx + 1]) || videoDuration; // Map each cue's endTime to the next group's startTime
20961
20962 cueGroup.forEach(function (cue) {
20963 cue.endTime = nextTime;
20964 });
20965 });
20966 };
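// --- Illustrative sketch (not part of the library) ---
// The grouping above turns point-in-time ID3 cues into ranges: each cue
// lasts until the next group's startTime, and the final group lasts until
// the video ends. Stand-alone:

var exampleChainEndTimes = function (startTimes, videoDuration) {
  return startTimes.map(function (start, idx) {
    return {
      startTime: start,
      endTime: idx + 1 < startTimes.length ? startTimes[idx + 1] : videoDuration
    };
  });
};
// e.g. exampleChainEndTimes([0, 5, 12], 30) yields the ranges
// [0 => 5], [5 => 12] and [12 => 30].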
20967 /**
20968 * Create metadata text track on video.js if it does not exist
20969 *
20970 * @param {Object} inbandTextTracks a reference to current inbandTextTracks
20971 * @param {string} dispatchType the inband metadata track dispatch type
20972 * @param {Object} tech the video.js tech
20973 * @private
20974 */
20975
20976 var createMetadataTrackIfNotExists = function createMetadataTrackIfNotExists(inbandTextTracks, dispatchType, tech) {
20977 if (inbandTextTracks.metadataTrack_) {
20978 return;
20979 }
20980
20981 inbandTextTracks.metadataTrack_ = tech.addRemoteTextTrack({
20982 kind: 'metadata',
20983 label: 'Timed Metadata'
20984 }, false).track;
20985 inbandTextTracks.metadataTrack_.inBandMetadataTrackDispatchType = dispatchType;
20986 };
20987 /**
20988 * Remove cues from a track on video.js.
20989 *
20990 * @param {Double} start start of where we should remove the cue
20991 * @param {Double} end end of where we should remove the cue
20992 * @param {Object} track the text track to remove the cues from
20993 * @private
20994 */
20995
20996 var removeCuesFromTrack = function removeCuesFromTrack(start, end, track) {
20997 var i;
20998 var cue;
20999
21000 if (!track) {
21001 return;
21002 }
21003
21004 if (!track.cues) {
21005 return;
21006 }
21007
21008 i = track.cues.length;
21009
21010 while (i--) {
21011 cue = track.cues[i]; // Remove any cue within the provided start and end time
21012
21013 if (cue.startTime >= start && cue.endTime <= end) {
21014 track.removeCue(cue);
21015 }
21016 }
21017 };
21018 /**
21019 * Remove duplicate cues from a track on video.js (a cue is considered a
21020 * duplicate if it has the same time interval and text as another)
21021 *
21022 * @param {Object} track the text track to remove the duplicate cues from
21023 * @private
21024 */
21025
21026 var removeDuplicateCuesFromTrack = function removeDuplicateCuesFromTrack(track) {
21027 var cues = track.cues;
21028
21029 if (!cues) {
21030 return;
21031 }
21032
21033 for (var i = 0; i < cues.length; i++) {
21034 var duplicates = [];
21035 var occurrences = 0;
21036
21037 for (var j = 0; j < cues.length; j++) {
21038 if (cues[i].startTime === cues[j].startTime && cues[i].endTime === cues[j].endTime && cues[i].text === cues[j].text) {
21039 occurrences++;
21040
21041 if (occurrences > 1) {
21042 duplicates.push(cues[j]);
21043 }
21044 }
21045 }
21046
21047 if (duplicates.length) {
21048 duplicates.forEach(function (dupe) {
21049 return track.removeCue(dupe);
21050 });
21051 }
21052 }
21053 };
21054
21055 /**
21056 * mux.js
21057 *
21058 * Copyright (c) Brightcove
21059 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
21060 */
21061 var ONE_SECOND_IN_TS = 90000,
21062 // 90kHz clock
21063 secondsToVideoTs,
21064 secondsToAudioTs,
21065 videoTsToSeconds,
21066 audioTsToSeconds,
21067 audioTsToVideoTs,
21068 videoTsToAudioTs,
21069 metadataTsToSeconds;
21070
21071 secondsToVideoTs = function secondsToVideoTs(seconds) {
21072 return seconds * ONE_SECOND_IN_TS;
21073 };
21074
21075 secondsToAudioTs = function secondsToAudioTs(seconds, sampleRate) {
21076 return seconds * sampleRate;
21077 };
21078
21079 videoTsToSeconds = function videoTsToSeconds(timestamp) {
21080 return timestamp / ONE_SECOND_IN_TS;
21081 };
21082
21083 audioTsToSeconds = function audioTsToSeconds(timestamp, sampleRate) {
21084 return timestamp / sampleRate;
21085 };
21086
21087 audioTsToVideoTs = function audioTsToVideoTs(timestamp, sampleRate) {
21088 return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
21089 };
21090
21091 videoTsToAudioTs = function videoTsToAudioTs(timestamp, sampleRate) {
21092 return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
21093 };
21094 /**
21095 * Adjust ID3 tag or caption timing information by the timeline pts values
21096 * (if keepOriginalTimestamps is false) and convert to seconds
21097 */
21098
21099
21100 metadataTsToSeconds = function metadataTsToSeconds(timestamp, timelineStartPts, keepOriginalTimestamps) {
21101 return videoTsToSeconds(keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts);
21102 };
21103
21104 var clock = {
21105 ONE_SECOND_IN_TS: ONE_SECOND_IN_TS,
21106 secondsToVideoTs: secondsToVideoTs,
21107 secondsToAudioTs: secondsToAudioTs,
21108 videoTsToSeconds: videoTsToSeconds,
21109 audioTsToSeconds: audioTsToSeconds,
21110 audioTsToVideoTs: audioTsToVideoTs,
21111 videoTsToAudioTs: videoTsToAudioTs,
21112 metadataTsToSeconds: metadataTsToSeconds
21113 };
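// --- Illustrative sketch (not part of the library) ---
// All MPEG-TS video timing above runs on the 90kHz clock, so one second is
// 90000 ticks; audio timestamps run at the sample rate instead. Worked
// values using the object above:
//
// clock.secondsToVideoTs(3) === 270000
// clock.videoTsToSeconds(270000) === 3
// clock.audioTsToVideoTs(44100, 44100) === 90000 (one second of 44.1kHz audio)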
21114
21115 /**
21116 * Returns a list of gops in the buffer that have a pts value of 3 seconds or more in
21117 * front of current time.
21118 *
21119 * @param {Array} buffer
21120 * The current buffer of gop information
21121 * @param {number} currentTime
21122 * The current time
21123 * @param {Double} mapping
21124 * Offset to map display time to stream presentation time
21125 * @return {Array}
21126 * List of gops considered safe to append over
21127 */
21128
21129 var gopsSafeToAlignWith = function gopsSafeToAlignWith(buffer, currentTime, mapping) {
21130 if (typeof currentTime === 'undefined' || currentTime === null || !buffer.length) {
21131 return [];
21132 } // pts value for current time + 3 seconds to give a bit more wiggle room
21133
21134
21135 var currentTimePts = Math.ceil((currentTime - mapping + 3) * clock.ONE_SECOND_IN_TS);
21136 var i;
21137
21138 for (i = 0; i < buffer.length; i++) {
21139 if (buffer[i].pts > currentTimePts) {
21140 break;
21141 }
21142 }
21143
21144 return buffer.slice(i);
21145 };
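// --- Illustrative sketch (not part of the library) ---
// Worked example for the cutoff above: with currentTime = 10 and
// mapping = -2, currentTimePts = Math.ceil((10 - (-2) + 3) * 90000)
// = 1350000, so only gops whose pts exceeds 1350000 survive the slice.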
21146 /**
21147 * Appends gop information (timing and byteLength) received by the transmuxer for the
21148 * gops appended in the last call to appendBuffer
21149 *
21150 * @param {Array} buffer
21151 * The current buffer of gop information
21152 * @param {Array} gops
21153 * List of new gop information
21154 * @param {boolean} replace
21155 * If true, replace the buffer with the new gop information. If false, append the
21156 * new gop information to the buffer in the right location of time.
21157 * @return {Array}
21158 * Updated list of gop information
21159 */
21160
21161 var updateGopBuffer = function updateGopBuffer(buffer, gops, replace) {
21162 if (!gops.length) {
21163 return buffer;
21164 }
21165
21166 if (replace) {
21167 // If we are in safe append mode, then completely overwrite the gop buffer
21168 // with the most recent appended data. This will make sure that when appending
21169 // future segments, we only try to align with gops that are both ahead of current
21170 // time and in the last segment appended.
21171 return gops.slice();
21172 }
21173
21174 var start = gops[0].pts;
21175 var i = 0;
21176
21177 for (i; i < buffer.length; i++) {
21178 if (buffer[i].pts >= start) {
21179 break;
21180 }
21181 }
21182
21183 return buffer.slice(0, i).concat(gops);
21184 };
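// --- Illustrative sketch (not part of the library) ---
// In append mode (replace === false), gops strictly before the first new
// pts are kept and everything from there on is swapped for the new list:
//
// updateGopBuffer([{ pts: 0 }, { pts: 9000 }], [{ pts: 9000 }], false)
//   yields [{ pts: 0 }, { pts: 9000 }] with the 9000 entry replaced by the
//   newly appended gop information.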
21185 /**
21186 * Removes gop information in buffer that overlaps with provided start and end
21187 *
21188 * @param {Array} buffer
21189 * The current buffer of gop information
21190 * @param {Double} start
21191 * position to start the remove at
21192 * @param {Double} end
21193 * position to end the remove at
21194 * @param {Double} mapping
21195 * Offset to map display time to stream presentation time
21196 */
21197
21198 var removeGopBuffer = function removeGopBuffer(buffer, start, end, mapping) {
21199 var startPts = Math.ceil((start - mapping) * clock.ONE_SECOND_IN_TS);
21200 var endPts = Math.ceil((end - mapping) * clock.ONE_SECOND_IN_TS);
21201 var updatedBuffer = buffer.slice();
21202 var i = buffer.length;
21203
21204 while (i--) {
21205 if (buffer[i].pts <= endPts) {
21206 break;
21207 }
21208 }
21209
21210 if (i === -1) {
21211 // no removal because end of remove range is before start of buffer
21212 return updatedBuffer;
21213 }
21214
21215 var j = i + 1;
21216
21217 while (j--) {
21218 if (buffer[j].pts <= startPts) {
21219 break;
21220 }
21221 } // clamp remove range start to 0 index
21222
21223
21224 j = Math.max(j, 0);
21225 updatedBuffer.splice(j, i - j + 1);
21226 return updatedBuffer;
21227 };
21228
21229 var shallowEqual = function shallowEqual(a, b) {
21230 // if both are undefined
21231 // or one or the other is undefined
21232 // they are not equal
21233 if (!a && !b || !a && b || a && !b) {
21234 return false;
21235 } // they are the same object and thus, equal
21236
21237
21238 if (a === b) {
21239 return true;
21240 } // sort keys so we can make sure they have
21241 // all the same keys later.
21242
21243
21244 var akeys = Object.keys(a).sort();
21245 var bkeys = Object.keys(b).sort(); // different number of keys, not equal
21246
21247 if (akeys.length !== bkeys.length) {
21248 return false;
21249 }
21250
21251 for (var i = 0; i < akeys.length; i++) {
21252 var key = akeys[i]; // different sorted keys, not equal
21253
21254 if (key !== bkeys[i]) {
21255 return false;
21256 } // different values, not equal
21257
21258
21259 if (a[key] !== b[key]) {
21260 return false;
21261 }
21262 }
21263
21264 return true;
21265 };
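// --- Illustrative sketch (not part of the library) ---
// shallowEqual compares own enumerable keys one level deep, by reference:
//
// shallowEqual({ a: 1 }, { a: 1 }) === true
// shallowEqual({ a: 1 }, { a: 1, b: 2 }) === false (key counts differ)
// shallowEqual({ a: {} }, { a: {} }) === false (different object references)
// shallowEqual(undefined, undefined) === false (by design, per the comment above)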
21266
21267 // https://www.w3.org/TR/WebIDL-1/#quotaexceedederror
21268 var QUOTA_EXCEEDED_ERR = 22;
21269
21270 /**
21271 * The segment loader has no recourse except to fetch a segment in the
21272 * current playlist and use the internal timestamps in that segment to
21273 * generate a syncPoint. This function returns a good candidate index
21274 * for that process.
21275 *
21276 * @param {Array} segments - the segments array from a playlist.
21277 * @return {number} An index of a segment from the playlist to load
21278 */
21279
21280 var getSyncSegmentCandidate = function getSyncSegmentCandidate(currentTimeline, segments, targetTime) {
21281 segments = segments || [];
21282 var timelineSegments = [];
21283 var time = 0;
21284
21285 for (var i = 0; i < segments.length; i++) {
21286 var segment = segments[i];
21287
21288 if (currentTimeline === segment.timeline) {
21289 timelineSegments.push(i);
21290 time += segment.duration;
21291
21292 if (time > targetTime) {
21293 return i;
21294 }
21295 }
21296 }
21297
21298 if (timelineSegments.length === 0) {
21299 return 0;
21300 } // default to the last timeline segment
21301
21302
21303 return timelineSegments[timelineSegments.length - 1];
21304 }; // In the event of a quota exceeded error, keep at least one second of back buffer. This
21305 // number was arbitrarily chosen and may be updated in the future, but seemed reasonable
21306 // as a start to prevent any potential issues with removing content too close to the
21307 // playhead.
21308
21309 var MIN_BACK_BUFFER = 1; // in seconds
21310
21311 var CHECK_BUFFER_DELAY = 500; // in ms
21312
21313 var finite = function finite(num) {
21314 return typeof num === 'number' && isFinite(num);
21315 }; // With most content hovering around 30fps, if a segment has a duration less than a half
21316 // frame at 30fps or one frame at 60fps, the bandwidth and throughput calculations will
21317 // not accurately reflect the rest of the content.
21318
21319
21320 var MIN_SEGMENT_DURATION_TO_SAVE_STATS = 1 / 60;
21321 var illegalMediaSwitch = function illegalMediaSwitch(loaderType, startingMedia, trackInfo) {
21322 // Although these checks should most likely cover non-'main' types, for now it narrows
21323 // the scope of our checks.
21324 if (loaderType !== 'main' || !startingMedia || !trackInfo) {
21325 return null;
21326 }
21327
21328 if (!trackInfo.hasAudio && !trackInfo.hasVideo) {
21329 return 'Neither audio nor video found in segment.';
21330 }
21331
21332 if (startingMedia.hasVideo && !trackInfo.hasVideo) {
21333 return 'Only audio found in segment when we expected video.' + ' We can\'t switch to audio only from a stream that had video.' + ' To get rid of this message, please add codec information to the manifest.';
21334 }
21335
21336 if (!startingMedia.hasVideo && trackInfo.hasVideo) {
21337 return 'Video found in segment when we expected only audio.' + ' We can\'t switch to a stream with video from an audio only stream.' + ' To get rid of this message, please add codec information to the manifest.';
21338 }
21339
21340 return null;
21341 };
21342 /**
21343 * Calculates a time value that is safe to remove from the back buffer without interrupting
21344 * playback.
21345 *
21346 * @param {TimeRange} seekable
21347 * The current seekable range
21348 * @param {number} currentTime
21349 * The current time of the player
21350 * @param {number} targetDuration
21351 * The target duration of the current playlist
21352 * @return {number}
21353 * Time that is safe to remove from the back buffer without interrupting playback
21354 */
21355
21356 var safeBackBufferTrimTime = function safeBackBufferTrimTime(seekable, currentTime, targetDuration) {
21357 // 30 seconds before the playhead provides a safe default for trimming.
21358 //
21359 // Choosing a reasonable default is particularly important for high bitrate content and
21360 // VOD videos/live streams with large windows, as the buffer may end up overfilled and
21361 // throw an APPEND_BUFFER_ERR.
21362 var trimTime = currentTime - Config.BACK_BUFFER_LENGTH;
21363
21364 if (seekable.length) {
21365 // Some live playlists may have a shorter window of content than the full allowed back
21366 // buffer. For these playlists, don't save content that's no longer within the window.
21367 trimTime = Math.max(trimTime, seekable.start(0));
21368 } // Don't remove within target duration of the current time to avoid the possibility of
21369 // removing the GOP currently being played, as removing it can cause playback stalls.
21370
21371
21372 var maxTrimTime = currentTime - targetDuration;
21373 return Math.min(maxTrimTime, trimTime);
21374 };
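// --- Illustrative sketch (not part of the library) ---
// Worked example, assuming Config.BACK_BUFFER_LENGTH is 30: with
// currentTime = 100, targetDuration = 10 and seekable starting at 80,
// trimTime = max(100 - 30, 80) = 80, clamped to at most 100 - 10 = 90,
// so everything before 80s can be removed without risking the active GOP.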
21375 var segmentInfoString = function segmentInfoString(segmentInfo) {
21376 var startOfSegment = segmentInfo.startOfSegment,
21377 duration = segmentInfo.duration,
21378 segment = segmentInfo.segment,
21379 part = segmentInfo.part,
21380 _segmentInfo$playlist = segmentInfo.playlist,
21381 seq = _segmentInfo$playlist.mediaSequence,
21382 id = _segmentInfo$playlist.id,
21383 _segmentInfo$playlist2 = _segmentInfo$playlist.segments,
21384 segments = _segmentInfo$playlist2 === void 0 ? [] : _segmentInfo$playlist2,
21385 index = segmentInfo.mediaIndex,
21386 partIndex = segmentInfo.partIndex,
21387 timeline = segmentInfo.timeline;
21388 var segmentLen = segments.length - 1;
21389 var selection = 'mediaIndex/partIndex increment';
21390
21391 if (segmentInfo.getMediaInfoForTime) {
21392 selection = "getMediaInfoForTime (" + segmentInfo.getMediaInfoForTime + ")";
21393 } else if (segmentInfo.isSyncRequest) {
21394 selection = 'getSyncSegmentCandidate (isSyncRequest)';
21395 }
21396
21397 if (segmentInfo.independent) {
21398 selection += " with independent " + segmentInfo.independent;
21399 }
21400
21401 var hasPartIndex = typeof partIndex === 'number';
21402 var name = segmentInfo.segment.uri ? 'segment' : 'pre-segment';
21403 var zeroBasedPartCount = hasPartIndex ? getKnownPartCount({
21404 preloadSegment: segment
21405 }) - 1 : 0;
21406 return name + " [" + (seq + index) + "/" + (seq + segmentLen) + "]" + (hasPartIndex ? " part [" + partIndex + "/" + zeroBasedPartCount + "]" : '') + (" segment start/end [" + segment.start + " => " + segment.end + "]") + (hasPartIndex ? " part start/end [" + part.start + " => " + part.end + "]" : '') + (" startOfSegment [" + startOfSegment + "]") + (" duration [" + duration + "]") + (" timeline [" + timeline + "]") + (" selected by [" + selection + "]") + (" playlist [" + id + "]");
21407 };
21408
21409 var timingInfoPropertyForMedia = function timingInfoPropertyForMedia(mediaType) {
21410 return mediaType + "TimingInfo";
21411 };
21412 /**
21413 * Returns the timestamp offset to use for the segment.
21414 *
21415 * @param {number} segmentTimeline
21416 * The timeline of the segment
21417 * @param {number} currentTimeline
21418 * The timeline currently being followed by the loader
21419 * @param {number} startOfSegment
21420 * The estimated segment start
21421 * @param {TimeRange[]} buffered
21422 * The loader's buffer
21423 * @param {boolean} overrideCheck
21424 * If true, no checks are made to see if the timestamp offset value should be set,
21425 * but sets it directly to a value.
21426 *
21427 * @return {number|null}
21428 * Either a number representing a new timestamp offset, or null if the segment is
21429 * part of the same timeline
21430 */
21431
21432
21433 var timestampOffsetForSegment = function timestampOffsetForSegment(_ref) {
21434 var segmentTimeline = _ref.segmentTimeline,
21435 currentTimeline = _ref.currentTimeline,
21436 startOfSegment = _ref.startOfSegment,
21437 buffered = _ref.buffered,
21438 overrideCheck = _ref.overrideCheck;
21439
21440 // Check to see if we are crossing a discontinuity to see if we need to set the
21441 // timestamp offset on the transmuxer and source buffer.
21442 //
21443 // Previously, we changed the timestampOffset if the start of this segment was less than
21444 // the currently set timestampOffset, but this isn't desirable as it can produce bad
21445 // behavior, especially around long running live streams.
21446 if (!overrideCheck && segmentTimeline === currentTimeline) {
21447 return null;
21448 } // When changing renditions, it's possible to request a segment on an older timeline. For
21449 // instance, given two renditions with the following:
21450 //
21451 // #EXTINF:10
21452 // segment1
21453 // #EXT-X-DISCONTINUITY
21454 // #EXTINF:10
21455 // segment2
21456 // #EXTINF:10
21457 // segment3
21458 //
21459 // And the current player state:
21460 //
21461 // current time: 8
21462 // buffer: 0 => 20
21463 //
21464 // The next segment on the current rendition would be segment3, filling the buffer from
21465 // 20s onwards. However, if a rendition switch happens after segment2 was requested,
21466 // then the next segment to be requested will be segment1 from the new rendition in
21467 // order to fill time 8 and onwards. Using the buffered end would result in repeated
21468 // content (since it would position segment1 of the new rendition starting at 20s). This
21469 // case can be identified when the new segment's timeline is a prior value. Instead of
21470 // using the buffered end, the startOfSegment can be used, which, hopefully, will be
21471 // more accurate to the actual start time of the segment.
21472
21473
21474 if (segmentTimeline < currentTimeline) {
21475 return startOfSegment;
21476 } // segmentInfo.startOfSegment used to be used as the timestamp offset, however, that
21477 // value uses the end of the last segment if it is available. While this value
21478 // should often be correct, it's better to rely on the buffered end, as the new
21479 // content post discontinuity should line up with the buffered end as if it were
21480 // time 0 for the new content.
21481
21482
21483 return buffered.length ? buffered.end(buffered.length - 1) : startOfSegment;
21484 };
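// --- Illustrative sketch (not part of the library) ---
// Condensing the branches above (calls are runnable against the function;
// buffered is only consulted on the newer-timeline branch):
//
// timestampOffsetForSegment({ segmentTimeline: 1, currentTimeline: 1, overrideCheck: false }) === null
// timestampOffsetForSegment({ segmentTimeline: 0, currentTimeline: 1, startOfSegment: 8 }) === 8
// and on a newer timeline the buffered end is used when anything is
// buffered, falling back to startOfSegment otherwise.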
21485 /**
21486 * Returns whether or not the loader should wait for a timeline change from the timeline
21487 * change controller before processing the segment.
21488 *
21489 * Primary timing in VHS goes by video. This is different from most media players, as
21490 * audio is more often used as the primary timing source. For the foreseeable future, VHS
21491 * will continue to use video as the primary timing source, due to the current logic and
21492 * expectations built around it.
21493
21494 * Since the timing follows video, in order to maintain sync, the video loader is
21495 * responsible for setting both audio and video source buffer timestamp offsets.
21496 *
21497 * Setting different values for audio and video source buffers could lead to
21498 * desyncing. The following examples demonstrate some of the situations where this
21499 * distinction is important. Note that all of these cases involve demuxed content. When
21500 * content is muxed, the audio and video are packaged together, therefore syncing
21501 * separate media playlists is not an issue.
21502 *
21503 * CASE 1: Audio prepares to load a new timeline before video:
21504 *
21505 * Timeline: 0 1
21506 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
21507 * Audio Loader: ^
21508 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
21509 * Video Loader ^
21510 *
21511 * In the above example, the audio loader is preparing to load the 6th segment, the first
21512 * after a discontinuity, while the video loader is still loading the 5th segment, before
21513 * the discontinuity.
21514 *
21515 * If the audio loader goes ahead and loads and appends the 6th segment before the video
21516 * loader crosses the discontinuity, then when appended, the 6th audio segment will use
21517 * the timestamp offset from timeline 0. This will likely lead to desyncing. In addition,
21518 * the audio loader must provide the audioAppendStart value to trim the content in the
21519 * transmuxer, and that value relies on the audio timestamp offset. Since the audio
21520 * timestamp offset is set by the video (main) loader, the audio loader shouldn't load the
21521 * segment until that value is provided.
21522 *
21523 * CASE 2: Video prepares to load a new timeline before audio:
21524 *
21525 * Timeline: 0 1
21526 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
21527 * Audio Loader: ^
21528 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
21529 * Video Loader ^
21530 *
21531 * In the above example, the video loader is preparing to load the 6th segment, the first
21532 * after a discontinuity, while the audio loader is still loading the 5th segment, before
21533 * the discontinuity.
21534 *
21535 * If the video loader goes ahead and loads and appends the 6th segment, then once the
21536 * segment is loaded and processed, both the video and audio timestamp offsets will be
21537 * set, since video is used as the primary timing source. This is to ensure content lines
21538 * up appropriately, as any modifications to the video timing are reflected by audio when
21539 * the video loader sets the audio and video timestamp offsets to the same value. However,
21540 * setting the timestamp offset for audio before audio has had a chance to change
21541 * timelines will likely lead to desyncing, as the audio loader will append segment 5 with
21542 * a timestamp intended to apply to segments from timeline 1 rather than timeline 0.
21543 *
21544 * CASE 3: When seeking, audio prepares to load a new timeline before video
21545 *
21546 * Timeline: 0 1
21547 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
21548 * Audio Loader: ^
21549 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
21550 * Video Loader ^
21551 *
21552 * In the above example, both audio and video loaders are loading segments from timeline
21553 * 0, but imagine that the seek originated from timeline 1.
21554 *
21555 * When seeking to a new timeline, the timestamp offset will be set based on the expected
21556 * segment start of the loaded video segment. In order to maintain sync, the audio loader
21557 * must wait for the video loader to load its segment and update both the audio and video
21558 * timestamp offsets before it may load and append its own segment. This is the case
21559 * whether the seek results in a mismatched segment request (e.g., the audio loader
21560 * chooses to load segment 3 and the video loader chooses to load segment 4) or the
21561 * loaders choose to load the same segment index from each playlist, as the segments may
21562 * not be aligned perfectly, even for matching segment indexes.
21563 *
21564 * @param {Object} timelineChangeController
21565 * @param {number} currentTimeline
21566 * The timeline currently being followed by the loader
21567 * @param {number} segmentTimeline
21568 * The timeline of the segment being loaded
21569 * @param {('main'|'audio')} loaderType
21570 * The loader type
21571 * @param {boolean} audioDisabled
21572 * Whether the audio is disabled for the loader. This should only be true when the
21573 * loader may have muxed audio in its segment, but should not append it, e.g., for
21574 * the main loader when an alternate audio playlist is active.
21575 *
21576 * @return {boolean}
21577 * Whether the loader should wait for a timeline change from the timeline change
21578 * controller before processing the segment
21579 */
21580
21581 var shouldWaitForTimelineChange = function shouldWaitForTimelineChange(_ref2) {
21582 var timelineChangeController = _ref2.timelineChangeController,
21583 currentTimeline = _ref2.currentTimeline,
21584 segmentTimeline = _ref2.segmentTimeline,
21585 loaderType = _ref2.loaderType,
21586 audioDisabled = _ref2.audioDisabled;
21587
21588 if (currentTimeline === segmentTimeline) {
21589 return false;
21590 }
21591
21592 if (loaderType === 'audio') {
21593 var lastMainTimelineChange = timelineChangeController.lastTimelineChange({
21594 type: 'main'
21595 }); // Audio loader should wait if:
21596 //
21597 // * main hasn't had a timeline change yet (thus has not loaded its first segment)
21598 // * main hasn't yet changed to the timeline audio is looking to load
21599
21600 return !lastMainTimelineChange || lastMainTimelineChange.to !== segmentTimeline;
21601 } // The main loader only needs to wait for timeline changes if there's demuxed audio.
21602 // Otherwise, there's nothing to wait for, since audio would be muxed into the main
21603 // loader's segments (or the content is audio/video only and handled by the main
21604 // loader).
21605
21606
21607 if (loaderType === 'main' && audioDisabled) {
21608 var pendingAudioTimelineChange = timelineChangeController.pendingTimelineChange({
21609 type: 'audio'
21610 }); // Main loader should wait for the audio loader if audio is not pending a timeline
21611 // change to the current timeline.
21612 //
21613 // Since the main loader is responsible for setting the timestamp offset for both
21614 // audio and video, the main loader must wait for audio to be about to change to its
21615 // timeline before setting the offset, otherwise, if audio is behind in loading,
21616 // segments from the previous timeline would be adjusted by the new timestamp offset.
21617 //
21618 // This requirement means that video will not cross a timeline until the audio is
21619 // about to cross to it, so that way audio and video will always cross the timeline
21620 // together.
21621 //
21622 // In addition to normal timeline changes, these rules also apply to the start of a
21623 // stream (going from a non-existent timeline, -1, to timeline 0). It's important
21624 // that these rules apply to the first timeline change because if they did not, it's
21625 // possible that the main loader will cross two timelines before the audio loader has
21626 // crossed one. Logic may be implemented to handle the startup as a special case, but
21627 // it's easier to simply treat all timeline changes the same.
21628
21629 if (pendingAudioTimelineChange && pendingAudioTimelineChange.to === segmentTimeline) {
21630 return false;
21631 }
21632
21633 return true;
21634 }
21635
21636 return false;
21637 };
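// --- Illustrative sketch (not part of the library) ---
// Condensed decision table for the function above:
//
// same timeline                                        -> false (never wait)
// audio loader, main not yet changed to that timeline  -> true
// main loader with demuxed audio, audio not yet pending
//   a change to that timeline                          -> true
// anything else                                        -> false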
21638 var mediaDuration = function mediaDuration(timingInfos) {
21639 var maxDuration = 0;
21640 ['video', 'audio'].forEach(function (type) {
21641 var typeTimingInfo = timingInfos[type + "TimingInfo"];
21642
21643 if (!typeTimingInfo) {
21644 return;
21645 }
21646
21647 var start = typeTimingInfo.start,
21648 end = typeTimingInfo.end;
21649 var duration;
21650
21651 if (typeof start === 'bigint' || typeof end === 'bigint') {
21652 duration = window.BigInt(end) - window.BigInt(start);
21653 } else if (typeof start === 'number' && typeof end === 'number') {
21654 duration = end - start;
21655 }
21656
21657 if (typeof duration !== 'undefined' && duration > maxDuration) {
21658 maxDuration = duration;
21659 }
21660 }); // convert back to a number if it is lower than MAX_SAFE_INTEGER
21661 // as we only need BigInt when we are above that.
21662
21663 if (typeof maxDuration === 'bigint' && maxDuration < Number.MAX_SAFE_INTEGER) {
21664 maxDuration = Number(maxDuration);
21665 }
21666
21667 return maxDuration;
21668 };
21669 var segmentTooLong = function segmentTooLong(_ref3) {
21670 var segmentDuration = _ref3.segmentDuration,
21671 maxDuration = _ref3.maxDuration;
21672
21673 // 0 duration segments are most likely due to metadata only segments or a lack of
21674 // information.
21675 if (!segmentDuration) {
21676 return false;
21677 } // For HLS:
21678 //
21679 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1
21680 // The EXTINF duration of each Media Segment in the Playlist
21681 // file, when rounded to the nearest integer, MUST be less than or equal
21682 // to the target duration; longer segments can trigger playback stalls
21683 // or other errors.
21684 //
21685 // For DASH, the mpd-parser uses the largest reported segment duration as the target
21686 // duration. Although that reported duration is occasionally approximate (i.e., not
21687 // exact), a strict check may report that a segment is too long more often in DASH.
21688
21689
21690 return Math.round(segmentDuration) > maxDuration + TIME_FUDGE_FACTOR;
21691 };
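// --- Illustrative sketch (not part of the library) ---
// segmentTooLong rounds first, per the HLS spec language quoted above:
//
// segmentTooLong({ segmentDuration: 10.4, maxDuration: 10 }) === false
//   (rounds to 10, within the target duration)
// segmentTooLong({ segmentDuration: 10.6, maxDuration: 10 }) === true
//   (rounds to 11, beyond the target plus the fudge factor)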
21692 var getTroublesomeSegmentDurationMessage = function getTroublesomeSegmentDurationMessage(segmentInfo, sourceType) {
21693 // Right now we aren't following DASH's timing model exactly, so only perform
21694 // this check for HLS content.
21695 if (sourceType !== 'hls') {
21696 return null;
21697 }
21698
21699 var segmentDuration = mediaDuration({
21700 audioTimingInfo: segmentInfo.audioTimingInfo,
21701 videoTimingInfo: segmentInfo.videoTimingInfo
21702 }); // Don't report if we lack information.
21703 //
21704 // If the segment has a duration of 0 it is either a lack of information or a
21705 // metadata only segment and shouldn't be reported here.
21706
21707 if (!segmentDuration) {
21708 return null;
21709 }
21710
21711 var targetDuration = segmentInfo.playlist.targetDuration;
21712 var isSegmentWayTooLong = segmentTooLong({
21713 segmentDuration: segmentDuration,
21714 maxDuration: targetDuration * 2
21715 });
21716 var isSegmentSlightlyTooLong = segmentTooLong({
21717 segmentDuration: segmentDuration,
21718 maxDuration: targetDuration
21719 });
21720 var segmentTooLongMessage = "Segment with index " + segmentInfo.mediaIndex + " " + ("from playlist " + segmentInfo.playlist.id + " ") + ("has a duration of " + segmentDuration + " ") + ("when the reported duration is " + segmentInfo.duration + " ") + ("and the target duration is " + targetDuration + ". ") + 'For HLS content, a duration in excess of the target duration may result in ' + 'playback issues. See the HLS specification section on EXT-X-TARGETDURATION for ' + 'more details: ' + 'https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1';
21721
21722 if (isSegmentWayTooLong || isSegmentSlightlyTooLong) {
21723 return {
21724 severity: isSegmentWayTooLong ? 'warn' : 'info',
21725 message: segmentTooLongMessage
21726 };
21727 }
21728
21729 return null;
21730 };
21731 /**
21732 * An object that manages segment loading and appending.
21733 *
21734 * @class SegmentLoader
21735 * @param {Object} options required and optional options
21736 * @extends videojs.EventTarget
21737 */
21738
21739 var SegmentLoader = /*#__PURE__*/function (_videojs$EventTarget) {
21740 inheritsLoose(SegmentLoader, _videojs$EventTarget);
21741
21742 function SegmentLoader(settings, options) {
21743 var _this;
21744
21745 _this = _videojs$EventTarget.call(this) || this; // check pre-conditions
21746
21747 if (!settings) {
21748 throw new TypeError('Initialization settings are required');
21749 }
21750
21751 if (typeof settings.currentTime !== 'function') {
21752 throw new TypeError('No currentTime getter specified');
21753 }
21754
21755 if (!settings.mediaSource) {
21756 throw new TypeError('No MediaSource specified');
21757 } // public properties
21758
21759
21760 _this.bandwidth = settings.bandwidth;
21761 _this.throughput = {
21762 rate: 0,
21763 count: 0
21764 };
21765 _this.roundTrip = NaN;
21766
21767 _this.resetStats_();
21768
21769 _this.mediaIndex = null;
21770 _this.partIndex = null; // private settings
21771
21772 _this.hasPlayed_ = settings.hasPlayed;
21773 _this.currentTime_ = settings.currentTime;
21774 _this.seekable_ = settings.seekable;
21775 _this.seeking_ = settings.seeking;
21776 _this.duration_ = settings.duration;
21777 _this.mediaSource_ = settings.mediaSource;
21778 _this.vhs_ = settings.vhs;
21779 _this.loaderType_ = settings.loaderType;
21780 _this.currentMediaInfo_ = void 0;
21781 _this.startingMediaInfo_ = void 0;
21782 _this.segmentMetadataTrack_ = settings.segmentMetadataTrack;
21783 _this.goalBufferLength_ = settings.goalBufferLength;
21784 _this.sourceType_ = settings.sourceType;
21785 _this.sourceUpdater_ = settings.sourceUpdater;
21786 _this.inbandTextTracks_ = settings.inbandTextTracks;
21787 _this.state_ = 'INIT';
21788 _this.timelineChangeController_ = settings.timelineChangeController;
21789 _this.shouldSaveSegmentTimingInfo_ = true;
21790 _this.parse708captions_ = settings.parse708captions;
21791 _this.useDtsForTimestampOffset_ = settings.useDtsForTimestampOffset;
21792 _this.captionServices_ = settings.captionServices;
21793 _this.experimentalExactManifestTimings = settings.experimentalExactManifestTimings; // private instance variables
21794
21795 _this.checkBufferTimeout_ = null;
21796 _this.error_ = void 0;
21797 _this.currentTimeline_ = -1;
21798 _this.pendingSegment_ = null;
21799 _this.xhrOptions_ = null;
21800 _this.pendingSegments_ = [];
21801 _this.audioDisabled_ = false;
21802 _this.isPendingTimestampOffset_ = false; // TODO possibly move gopBuffer and timeMapping info to a separate controller
21803
21804 _this.gopBuffer_ = [];
21805 _this.timeMapping_ = 0;
21806 _this.safeAppend_ = videojs__default["default"].browser.IE_VERSION >= 11;
21807 _this.appendInitSegment_ = {
21808 audio: true,
21809 video: true
21810 };
21811 _this.playlistOfLastInitSegment_ = {
21812 audio: null,
21813 video: null
21814 };
21815 _this.callQueue_ = []; // If the segment loader prepares to load a segment, but does not have enough
21816 // information yet to start the loading process (e.g., if the audio loader wants to
21817 // load a segment from the next timeline but the main loader hasn't yet crossed that
21818 // timeline), then the load call will be added to the queue until it is ready to be
21819 // processed.
21820
21821 _this.loadQueue_ = [];
21822 _this.metadataQueue_ = {
21823 id3: [],
21824 caption: []
21825 };
21826 _this.waitingOnRemove_ = false;
21827 _this.quotaExceededErrorRetryTimeout_ = null; // Fragmented mp4 playback
21828
21829 _this.activeInitSegmentId_ = null;
21830 _this.initSegments_ = {}; // HLSe playback
21831
21832 _this.cacheEncryptionKeys_ = settings.cacheEncryptionKeys;
21833 _this.keyCache_ = {};
21834 _this.decrypter_ = settings.decrypter; // Manages the tracking and generation of sync-points, mappings
21835 // between a time in the display timeline and a segment index within
21836 // a playlist
21837
21838 _this.syncController_ = settings.syncController;
21839 _this.syncPoint_ = {
21840 segmentIndex: 0,
21841 time: 0
21842 };
21843 _this.transmuxer_ = _this.createTransmuxer_();
21844
21845 _this.triggerSyncInfoUpdate_ = function () {
21846 return _this.trigger('syncinfoupdate');
21847 };
21848
21849 _this.syncController_.on('syncinfoupdate', _this.triggerSyncInfoUpdate_);
21850
21851 _this.mediaSource_.addEventListener('sourceopen', function () {
21852 if (!_this.isEndOfStream_()) {
21853 _this.ended_ = false;
21854 }
21855 }); // ...for determining the fetch location
21856
21857
21858 _this.fetchAtBuffer_ = false;
21859 _this.logger_ = logger("SegmentLoader[" + _this.loaderType_ + "]");
21860 Object.defineProperty(assertThisInitialized(_this), 'state', {
21861 get: function get() {
21862 return this.state_;
21863 },
21864 set: function set(newState) {
21865 if (newState !== this.state_) {
21866 this.logger_(this.state_ + " -> " + newState);
21867 this.state_ = newState;
21868 this.trigger('statechange');
21869 }
21870 }
21871 });
21872
21873 _this.sourceUpdater_.on('ready', function () {
21874 if (_this.hasEnoughInfoToAppend_()) {
21875 _this.processCallQueue_();
21876 }
21877 }); // Only the main loader needs to listen for pending timeline changes, as the main
21878 // loader should wait for audio to be ready to change its timeline so that both main
21879 // and audio timelines change together. For more details, see the
21880 // shouldWaitForTimelineChange function.
21881
21882
21883 if (_this.loaderType_ === 'main') {
21884 _this.timelineChangeController_.on('pendingtimelinechange', function () {
21885 if (_this.hasEnoughInfoToAppend_()) {
21886 _this.processCallQueue_();
21887 }
21888 });
21889 } // The main loader only listens on pending timeline changes, but the audio loader,
21890 // since its loads follow main, needs to listen on timeline changes. For more details,
21891 // see the shouldWaitForTimelineChange function.
21892
21893
21894 if (_this.loaderType_ === 'audio') {
21895 _this.timelineChangeController_.on('timelinechange', function () {
21896 if (_this.hasEnoughInfoToLoad_()) {
21897 _this.processLoadQueue_();
21898 }
21899
21900 if (_this.hasEnoughInfoToAppend_()) {
21901 _this.processCallQueue_();
21902 }
21903 });
21904 }
21905
21906 return _this;
21907 }
21908
    var _proto = SegmentLoader.prototype;

    _proto.createTransmuxer_ = function createTransmuxer_() {
      return segmentTransmuxer.createTransmuxer({
        remux: false,
        alignGopsAtEnd: this.safeAppend_,
        keepOriginalTimestamps: true,
        parse708captions: this.parse708captions_,
        captionServices: this.captionServices_
      });
    }
    /**
     * reset all of our media stats
     *
     * @private
     */
    ;

    _proto.resetStats_ = function resetStats_() {
      this.mediaBytesTransferred = 0;
      this.mediaRequests = 0;
      this.mediaRequestsAborted = 0;
      this.mediaRequestsTimedout = 0;
      this.mediaRequestsErrored = 0;
      this.mediaTransferDuration = 0;
      this.mediaSecondsLoaded = 0;
      this.mediaAppends = 0;
    }
    /**
     * dispose of the SegmentLoader and reset to the default state
     */
    ;

    _proto.dispose = function dispose() {
      this.trigger('dispose');
      this.state = 'DISPOSED';
      this.pause();
      this.abort_();

      if (this.transmuxer_) {
        this.transmuxer_.terminate();
      }

      this.resetStats_();

      if (this.checkBufferTimeout_) {
        window.clearTimeout(this.checkBufferTimeout_);
      }

      if (this.syncController_ && this.triggerSyncInfoUpdate_) {
        this.syncController_.off('syncinfoupdate', this.triggerSyncInfoUpdate_);
      }

      this.off();
    };

    _proto.setAudio = function setAudio(enable) {
      this.audioDisabled_ = !enable;

      if (enable) {
        this.appendInitSegment_.audio = true;
      } else {
        // remove current track audio if it gets disabled
        this.sourceUpdater_.removeAudio(0, this.duration_());
      }
    }
    /**
     * abort anything the SegmentLoader is currently doing
     * and reset to a default state
     */
    ;

    _proto.abort = function abort() {
      if (this.state !== 'WAITING') {
        if (this.pendingSegment_) {
          this.pendingSegment_ = null;
        }

        return;
      }

      this.abort_(); // We aborted the requests we were waiting on, so reset the loader's state to READY
      // since we are no longer "waiting" on any requests. XHR callback is not always run
      // when the request is aborted. This will prevent the loader from being stuck in the
      // WAITING state indefinitely.

      this.state = 'READY'; // don't wait for buffer check timeouts to begin fetching the
      // next segment

      if (!this.paused()) {
        this.monitorBuffer_();
      }
    }
    /**
     * abort all pending xhr requests and null any pending segments
     *
     * @private
     */
    ;

    _proto.abort_ = function abort_() {
      if (this.pendingSegment_ && this.pendingSegment_.abortRequests) {
        this.pendingSegment_.abortRequests();
      } // clear out the segment being processed

      this.pendingSegment_ = null;
      this.callQueue_ = [];
      this.loadQueue_ = [];
      this.metadataQueue_.id3 = [];
      this.metadataQueue_.caption = [];
      this.timelineChangeController_.clearPendingTimelineChange(this.loaderType_);
      this.waitingOnRemove_ = false;
      window.clearTimeout(this.quotaExceededErrorRetryTimeout_);
      this.quotaExceededErrorRetryTimeout_ = null;
    };

    _proto.checkForAbort_ = function checkForAbort_(requestId) {
      // If the state is APPENDING, then aborts will not modify the state, meaning the first
      // callback that happens should reset the state to READY so that loading can continue.
      if (this.state === 'APPENDING' && !this.pendingSegment_) {
        this.state = 'READY';
        return true;
      }

      if (!this.pendingSegment_ || this.pendingSegment_.requestId !== requestId) {
        return true;
      }

      return false;
    }
    /**
     * set an error on the segment loader and null out any pending segments
     *
     * @param {Error} error the error to set on the SegmentLoader
     * @return {Error} the error that was set or that is currently set
     */
    ;

    _proto.error = function error(_error) {
      if (typeof _error !== 'undefined') {
        this.logger_('error occurred:', _error);
        this.error_ = _error;
      }

      this.pendingSegment_ = null;
      return this.error_;
    };

    _proto.endOfStream = function endOfStream() {
      this.ended_ = true;

      if (this.transmuxer_) {
        // need to clear out any cached data to prepare for the new segment
        segmentTransmuxer.reset(this.transmuxer_);
      }

      this.gopBuffer_.length = 0;
      this.pause();
      this.trigger('ended');
    }
    /**
     * Indicates which time ranges are buffered
     *
     * @return {TimeRange}
     *         TimeRange object representing the current buffered ranges
     */
    ;

    _proto.buffered_ = function buffered_() {
      var trackInfo = this.getMediaInfo_();

      if (!this.sourceUpdater_ || !trackInfo) {
        return videojs__default["default"].createTimeRanges();
      }

      if (this.loaderType_ === 'main') {
        var hasAudio = trackInfo.hasAudio,
            hasVideo = trackInfo.hasVideo,
            isMuxed = trackInfo.isMuxed;

        if (hasVideo && hasAudio && !this.audioDisabled_ && !isMuxed) {
          return this.sourceUpdater_.buffered();
        }

        if (hasVideo) {
          return this.sourceUpdater_.videoBuffered();
        }
      } // One case that can be ignored for now is audio only with alt audio,
      // as we don't yet have proper support for that.

      return this.sourceUpdater_.audioBuffered();
    }
    /**
     * Gets and sets init segment for the provided map
     *
     * @param {Object} map
     *        The map object representing the init segment to get or set
     * @param {boolean=} set
     *        If true, the init segment for the provided map should be saved
     * @return {Object}
     *         map object for desired init segment
     */
    ;

    _proto.initSegmentForMap = function initSegmentForMap(map, set) {
      if (set === void 0) {
        set = false;
      }

      if (!map) {
        return null;
      }

      var id = initSegmentId(map);
      var storedMap = this.initSegments_[id];

      if (set && !storedMap && map.bytes) {
        this.initSegments_[id] = storedMap = {
          resolvedUri: map.resolvedUri,
          byterange: map.byterange,
          bytes: map.bytes,
          tracks: map.tracks,
          timescales: map.timescales
        };
      }

      return storedMap || map;
    }
    /**
     * Gets and sets key for the provided key
     *
     * @param {Object} key
     *        The key object representing the key to get or set
     * @param {boolean=} set
     *        If true, the key for the provided key should be saved
     * @return {Object}
     *         Key object for desired key
     */
    ;

    _proto.segmentKey = function segmentKey(key, set) {
      if (set === void 0) {
        set = false;
      }

      if (!key) {
        return null;
      }

      var id = segmentKeyId(key);
      var storedKey = this.keyCache_[id]; // TODO: We should use the HTTP Expires header to invalidate our cache per
      // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-6.2.3

      if (this.cacheEncryptionKeys_ && set && !storedKey && key.bytes) {
        this.keyCache_[id] = storedKey = {
          resolvedUri: key.resolvedUri,
          bytes: key.bytes
        };
      }

      var result = {
        resolvedUri: (storedKey || key).resolvedUri
      };

      if (storedKey) {
        result.bytes = storedKey.bytes;
      }

      return result;
    }
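    // Illustrative sketch (not in the library source): segmentKey implements a
    // simple write-through cache keyed by segmentKeyId(key). Assuming a
    // hypothetical `key` object with bytes and cacheEncryptionKeys enabled:
    //
    //   loader.segmentKey(key, true);         // first call populates keyCache_
    //   var cached = loader.segmentKey(key);  // -> { resolvedUri, bytes }
    //
    // Only resolvedUri (and bytes, when cached) are exposed on the returned
    // object, so callers never mutate the cached entry itself.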
    /**
     * Returns true if all configuration required for loading is present, otherwise false.
     *
     * @return {boolean} True if all the configuration is ready for loading
     * @private
     */
    ;

    _proto.couldBeginLoading_ = function couldBeginLoading_() {
      return this.playlist_ && !this.paused();
    }
    /**
     * load a playlist and start to fill the buffer
     */
    ;

    _proto.load = function load() {
      // un-pause
      this.monitorBuffer_(); // if we don't have a playlist yet, keep waiting for one to be
      // specified

      if (!this.playlist_) {
        return;
      } // if all the configuration is ready, initialize and begin loading

      if (this.state === 'INIT' && this.couldBeginLoading_()) {
        return this.init_();
      } // if we're in the middle of processing a segment already, don't
      // kick off an additional segment request

      if (!this.couldBeginLoading_() || this.state !== 'READY' && this.state !== 'INIT') {
        return;
      }

      this.state = 'READY';
    }
    /**
     * Once all the starting parameters have been specified, begin
     * operation. This method should only be invoked from the INIT
     * state.
     *
     * @private
     */
    ;

    _proto.init_ = function init_() {
      this.state = 'READY'; // if this is the audio segment loader, and it hasn't been inited before, then any old
      // audio data from the muxed content should be removed

      this.resetEverything();
      return this.monitorBuffer_();
    }
    /**
     * set a playlist on the segment loader
     *
     * @param {PlaylistLoader} newPlaylist the playlist to set on the segment loader
     */
    ;

    _proto.playlist = function playlist(newPlaylist, options) {
      if (options === void 0) {
        options = {};
      }

      if (!newPlaylist) {
        return;
      }

      var oldPlaylist = this.playlist_;
      var segmentInfo = this.pendingSegment_;
      this.playlist_ = newPlaylist;
      this.xhrOptions_ = options; // when we haven't started playing yet, the start of a live playlist
      // is always our zero-time so force a sync update each time the playlist
      // is refreshed from the server
      //
      // Use the INIT state to determine if playback has started, as the playlist sync info
      // should be fixed once requests begin (as sync points are generated based on sync
      // info), but not before then.

      if (this.state === 'INIT') {
        newPlaylist.syncInfo = {
          mediaSequence: newPlaylist.mediaSequence,
          time: 0
        }; // Setting the date time mapping means mapping the program date time (if available)
        // to time 0 on the player's timeline. The playlist's syncInfo serves a similar
        // purpose, mapping the initial mediaSequence to time zero. Since the syncInfo can
        // be updated as the playlist is refreshed before the loader starts loading, the
        // program date time mapping needs to be updated as well.
        //
        // This mapping is only done for the main loader because a program date time should
        // map equivalently between playlists.

        if (this.loaderType_ === 'main') {
          this.syncController_.setDateTimeMappingForStart(newPlaylist);
        }
      }

      var oldId = null;

      if (oldPlaylist) {
        if (oldPlaylist.id) {
          oldId = oldPlaylist.id;
        } else if (oldPlaylist.uri) {
          oldId = oldPlaylist.uri;
        }
      }

      this.logger_("playlist update [" + oldId + " => " + (newPlaylist.id || newPlaylist.uri) + "]"); // in VOD, this is always a rendition switch (or we updated our syncInfo above)
      // in LIVE, we always want to update with new playlists (including refreshes)

      this.trigger('syncinfoupdate'); // if we were unpaused but waiting for a playlist, start
      // buffering now

      if (this.state === 'INIT' && this.couldBeginLoading_()) {
        return this.init_();
      }

      if (!oldPlaylist || oldPlaylist.uri !== newPlaylist.uri) {
        if (this.mediaIndex !== null) {
          // we must reset/resync the segment loader when we switch renditions and
          // the segment loader is already synced to the previous rendition
          // on playlist changes we want it to be possible to fetch
          // at the buffer for vod but not for live. So we use resetLoader
          // for live and resyncLoader for vod. We want this because
          // if a playlist uses independent and non-independent segments/parts the
          // buffer may not accurately reflect the next segment that we should try
          // downloading.
          if (!newPlaylist.endList) {
            this.resetLoader();
          } else {
            this.resyncLoader();
          }
        }

        this.currentMediaInfo_ = void 0;
        this.trigger('playlistupdate'); // the rest of this function depends on `oldPlaylist` being defined

        return;
      } // we reloaded the same playlist so we are in a live scenario
      // and we will likely need to adjust the mediaIndex

      var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;
      this.logger_("live window shift [" + mediaSequenceDiff + "]"); // update the mediaIndex on the SegmentLoader
      // this is important because we can abort a request and this value must be
      // equal to the last appended mediaIndex

      if (this.mediaIndex !== null) {
        this.mediaIndex -= mediaSequenceDiff; // this can happen if we are going to load the first segment, but get a playlist
        // update during that. mediaIndex would go from 0 to -1 if mediaSequence in the
        // new playlist was incremented by 1.

        if (this.mediaIndex < 0) {
          this.mediaIndex = null;
          this.partIndex = null;
        } else {
          var segment = this.playlist_.segments[this.mediaIndex]; // partIndex should remain the same for the same segment
          // unless parts fell off of the playlist for this segment.
          // In that case we need to reset partIndex and resync

          if (this.partIndex && (!segment.parts || !segment.parts.length || !segment.parts[this.partIndex])) {
            var mediaIndex = this.mediaIndex;
            this.logger_("currently processing part (index " + this.partIndex + ") no longer exists.");
            this.resetLoader(); // We want to throw away the partIndex and the data associated with it,
            // as the part was dropped from our current playlist's segment.
            // The mediaIndex will still be valid so keep that around.

            this.mediaIndex = mediaIndex;
          }
        }
      } // update the mediaIndex on the SegmentInfo object
      // this is important because we will update this.mediaIndex with this value
      // in `handleAppendsDone_` after the segment has been successfully appended

      if (segmentInfo) {
        segmentInfo.mediaIndex -= mediaSequenceDiff;

        if (segmentInfo.mediaIndex < 0) {
          segmentInfo.mediaIndex = null;
          segmentInfo.partIndex = null;
        } else {
          // we need to update the referenced segment so that timing information is
          // saved for the new playlist's segment, however, if the segment fell off the
          // playlist, we can leave the old reference and just lose the timing info
          if (segmentInfo.mediaIndex >= 0) {
            segmentInfo.segment = newPlaylist.segments[segmentInfo.mediaIndex];
          }

          if (segmentInfo.partIndex >= 0 && segmentInfo.segment.parts) {
            segmentInfo.part = segmentInfo.segment.parts[segmentInfo.partIndex];
          }
        }
      }

      this.syncController_.saveExpiredSegmentInfo(oldPlaylist, newPlaylist);
    }
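    // Worked example (illustrative, not from the source): if a live playlist
    // refresh bumps mediaSequence from 100 to 102, mediaSequenceDiff is 2, so
    // a loader that was on mediaIndex 5 slides back to 3 and still points at
    // the same media. If the loader was on mediaIndex 1 it would land at -1,
    // which is why the code above nulls mediaIndex/partIndex in that case and
    // falls back to resynchronization.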
    /**
     * Prevent the loader from fetching additional segments. If there
     * is a segment request outstanding, it will finish processing
     * before the loader halts. A segment loader can be unpaused by
     * calling load().
     */
    ;

    _proto.pause = function pause() {
      if (this.checkBufferTimeout_) {
        window.clearTimeout(this.checkBufferTimeout_);
        this.checkBufferTimeout_ = null;
      }
    }
    /**
     * Returns whether the segment loader is fetching additional
     * segments when given the opportunity. This property can be
     * modified through calls to pause() and load().
     */
    ;

    _proto.paused = function paused() {
      return this.checkBufferTimeout_ === null;
    }
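    // Note (illustrative, not part of the source): pause() and paused() hinge
    // entirely on checkBufferTimeout_; there is no separate paused flag. A
    // sketch of the contract, with `loader` as a hypothetical instance:
    //
    //   loader.pause();   // clears the buffer-check timer
    //   loader.paused();  // -> true, the timer handle is null
    //   loader.load();    // monitorBuffer_() re-arms the timer
    //   loader.paused();  // -> false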
    /**
     * Delete all the buffered data and reset the SegmentLoader
     *
     * @param {Function} [done] an optional callback to be executed when the remove
     *        operation is complete
     */
    ;

    _proto.resetEverything = function resetEverything(done) {
      this.ended_ = false;
      this.activeInitSegmentId_ = null;
      this.appendInitSegment_ = {
        audio: true,
        video: true
      };
      this.resetLoader(); // remove from 0, the earliest point, to Infinity, to signify removal of everything.
      // VTT Segment Loader doesn't need to do anything but in the regular SegmentLoader,
      // we then clamp the value to duration if necessary.

      this.remove(0, Infinity, done); // clears fmp4 captions

      if (this.transmuxer_) {
        this.transmuxer_.postMessage({
          action: 'clearAllMp4Captions'
        }); // reset the cache in the transmuxer

        this.transmuxer_.postMessage({
          action: 'reset'
        });
      }
    }
    /**
     * Force the SegmentLoader to resync and start loading around the currentTime instead
     * of starting at the end of the buffer
     *
     * Useful for fast quality changes
     */
    ;

    _proto.resetLoader = function resetLoader() {
      this.fetchAtBuffer_ = false;
      this.resyncLoader();
    }
    /**
     * Force the SegmentLoader to restart synchronization and make a conservative guess
     * before returning to the simple walk-forward method
     */
    ;

    _proto.resyncLoader = function resyncLoader() {
      if (this.transmuxer_) {
        // need to clear out any cached data to prepare for the new segment
        segmentTransmuxer.reset(this.transmuxer_);
      }

      this.mediaIndex = null;
      this.partIndex = null;
      this.syncPoint_ = null;
      this.isPendingTimestampOffset_ = false;
      this.callQueue_ = [];
      this.loadQueue_ = [];
      this.metadataQueue_.id3 = [];
      this.metadataQueue_.caption = [];
      this.abort();

      if (this.transmuxer_) {
        this.transmuxer_.postMessage({
          action: 'clearParsedMp4Captions'
        });
      }
    }
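    // Summary of the three reset helpers above (illustrative, derived from the
    // code): each layer is a superset of the one below it.
    //
    //   resetEverything() -> resetLoader() + removes buffered media (0..duration)
    //                        and forces init segments to be re-appended
    //   resetLoader()     -> resyncLoader() + disables fetchAtBuffer_
    //   resyncLoader()    -> drops mediaIndex/partIndex/syncPoint_, clears
    //                        queued work, and aborts any in-flight request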
    /**
     * Remove any data in the source buffer between start and end times
     *
     * @param {number} start - the start time of the region to remove from the buffer
     * @param {number} end - the end time of the region to remove from the buffer
     * @param {Function} [done] - an optional callback to be executed when the remove
     *        operation is complete
     * @param {boolean} [force] - force all remove operations to happen
     */
    ;

    _proto.remove = function remove(start, end, done, force) {
      if (done === void 0) {
        done = function done() {};
      }

      if (force === void 0) {
        force = false;
      }

      // clamp end to duration if we need to remove everything.
      // This is due to a browser bug that causes issues if we remove to Infinity.
      // videojs/videojs-contrib-hls#1225
      if (end === Infinity) {
        end = this.duration_();
      } // skip removes that would throw an error
      // commonly happens during a rendition switch at the start of a video
      // from start 0 to end 0

      if (end <= start) {
        this.logger_("skipping remove because end " + end + " is <= start " + start);
        return;
      }

      if (!this.sourceUpdater_ || !this.getMediaInfo_()) {
        this.logger_('skipping remove because no source updater or starting media info'); // nothing to remove if we haven't processed any media

        return;
      } // set it to one to complete this function's removes

      var removesRemaining = 1;

      var removeFinished = function removeFinished() {
        removesRemaining--;

        if (removesRemaining === 0) {
          done();
        }
      };

      if (force || !this.audioDisabled_) {
        removesRemaining++;
        this.sourceUpdater_.removeAudio(start, end, removeFinished);
      } // While it would be better to only remove video if the main loader has video, this
      // should be safe with audio only as removeVideo will call back even if there's no
      // video buffer.
      //
      // In theory we can check to see if there's video before calling the remove, but in
      // the event that we're switching between renditions and from video to audio only
      // (when we add support for that), we may need to clear the video contents despite
      // what the new media will contain.

      if (force || this.loaderType_ === 'main') {
        this.gopBuffer_ = removeGopBuffer(this.gopBuffer_, start, end, this.timeMapping_);
        removesRemaining++;
        this.sourceUpdater_.removeVideo(start, end, removeFinished);
      } // remove any captions and ID3 tags

      for (var track in this.inbandTextTracks_) {
        removeCuesFromTrack(start, end, this.inbandTextTracks_[track]);
      }

      removeCuesFromTrack(start, end, this.segmentMetadataTrack_); // finished this function's removes

      removeFinished();
    }
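    // Illustrative note on the counting pattern above (not in the source):
    // removesRemaining starts at 1 so that `done` cannot fire before every
    // removeAudio/removeVideo call has been issued; the trailing
    // removeFinished() releases that initial count. The same idiom in
    // isolation:
    //
    //   var pending = 1;
    //   var finish = function () { if (--pending === 0) done(); };
    //   if (needAudio) { pending++; removeAudio(start, end, finish); }
    //   if (needVideo) { pending++; removeVideo(start, end, finish); }
    //   finish(); // balance the initial count of 1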
    /**
     * (re-)schedule monitorBufferTick_ to run as soon as possible
     *
     * @private
     */
    ;

    _proto.monitorBuffer_ = function monitorBuffer_() {
      if (this.checkBufferTimeout_) {
        window.clearTimeout(this.checkBufferTimeout_);
      }

      this.checkBufferTimeout_ = window.setTimeout(this.monitorBufferTick_.bind(this), 1);
    }
    /**
     * As long as the SegmentLoader is in the READY state, periodically
     * invoke fillBuffer_().
     *
     * @private
     */
    ;

    _proto.monitorBufferTick_ = function monitorBufferTick_() {
      if (this.state === 'READY') {
        this.fillBuffer_();
      }

      if (this.checkBufferTimeout_) {
        window.clearTimeout(this.checkBufferTimeout_);
      }

      this.checkBufferTimeout_ = window.setTimeout(this.monitorBufferTick_.bind(this), CHECK_BUFFER_DELAY);
    }
    /**
     * fill the buffer with segments unless the sourceBuffers are
     * currently updating
     *
     * Note: this function should only ever be called by monitorBuffer_
     * and never directly
     *
     * @private
     */
    ;

    _proto.fillBuffer_ = function fillBuffer_() {
      // TODO since the source buffer maintains a queue, and we shouldn't call this function
      // except when we're ready for the next segment, this check can most likely be removed
      if (this.sourceUpdater_.updating()) {
        return;
      } // see if we need to begin loading immediately

      var segmentInfo = this.chooseNextRequest_();

      if (!segmentInfo) {
        return;
      }

      if (typeof segmentInfo.timestampOffset === 'number') {
        this.isPendingTimestampOffset_ = false;
        this.timelineChangeController_.pendingTimelineChange({
          type: this.loaderType_,
          from: this.currentTimeline_,
          to: segmentInfo.timeline
        });
      }

      this.loadSegment_(segmentInfo);
    }
    /**
     * Determines if we should call endOfStream on the media source based
     * on the state of the buffer or if the appended segment was the final
     * segment in the playlist.
     *
     * @param {number} [mediaIndex] the media index of segment we last appended
     * @param {Object} [playlist] a media playlist object
     * @return {boolean} do we need to call endOfStream on the MediaSource
     */
    ;

    _proto.isEndOfStream_ = function isEndOfStream_(mediaIndex, playlist, partIndex) {
      if (mediaIndex === void 0) {
        mediaIndex = this.mediaIndex;
      }

      if (playlist === void 0) {
        playlist = this.playlist_;
      }

      if (partIndex === void 0) {
        partIndex = this.partIndex;
      }

      if (!playlist || !this.mediaSource_) {
        return false;
      }

      var segment = typeof mediaIndex === 'number' && playlist.segments[mediaIndex]; // mediaIndex is zero based but length is 1 based

      var appendedLastSegment = mediaIndex + 1 === playlist.segments.length; // true if there are no parts, or this is the last part.

      var appendedLastPart = !segment || !segment.parts || partIndex + 1 === segment.parts.length; // if we've buffered to the end of the video, we need to call endOfStream
      // so that MediaSources can trigger the `ended` event when it runs out of
      // buffered data instead of waiting for more data

      return playlist.endList && this.mediaSource_.readyState === 'open' && appendedLastSegment && appendedLastPart;
    }
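    // Illustrative example (not in the source): for a VOD playlist with
    // endList set and 10 segments, appending mediaIndex 9 (the last segment,
    // with no parts or with its last part appended) while the MediaSource is
    // still 'open' makes isEndOfStream_() return true, which is what allows
    // endOfStream to be signalled and the 'ended' event to fire.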
    /**
     * Determines what request should be made given current segment loader state.
     *
     * @return {Object} a request object that describes the segment/part to load
     */
    ;

    _proto.chooseNextRequest_ = function chooseNextRequest_() {
      var buffered = this.buffered_();
      var bufferedEnd = lastBufferedEnd(buffered) || 0;
      var bufferedTime = timeAheadOf(buffered, this.currentTime_());
      var preloaded = !this.hasPlayed_() && bufferedTime >= 1;
      var haveEnoughBuffer = bufferedTime >= this.goalBufferLength_();
      var segments = this.playlist_.segments; // return no segment if:
      // 1. we don't have segments
      // 2. The video has not yet played and we already downloaded a segment
      // 3. we already have enough buffered time

      if (!segments.length || preloaded || haveEnoughBuffer) {
        return null;
      }

      this.syncPoint_ = this.syncPoint_ || this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_());
      var next = {
        partIndex: null,
        mediaIndex: null,
        startOfSegment: null,
        playlist: this.playlist_,
        isSyncRequest: Boolean(!this.syncPoint_)
      };

      if (next.isSyncRequest) {
        next.mediaIndex = getSyncSegmentCandidate(this.currentTimeline_, segments, bufferedEnd);
      } else if (this.mediaIndex !== null) {
        var segment = segments[this.mediaIndex];
        var partIndex = typeof this.partIndex === 'number' ? this.partIndex : -1;
        next.startOfSegment = segment.end ? segment.end : bufferedEnd;

        if (segment.parts && segment.parts[partIndex + 1]) {
          next.mediaIndex = this.mediaIndex;
          next.partIndex = partIndex + 1;
        } else {
          next.mediaIndex = this.mediaIndex + 1;
        }
      } else {
        // Find the segment containing the end of the buffer or current time.
        var _Playlist$getMediaInf = Playlist.getMediaInfoForTime({
          experimentalExactManifestTimings: this.experimentalExactManifestTimings,
          playlist: this.playlist_,
          currentTime: this.fetchAtBuffer_ ? bufferedEnd : this.currentTime_(),
          startingPartIndex: this.syncPoint_.partIndex,
          startingSegmentIndex: this.syncPoint_.segmentIndex,
          startTime: this.syncPoint_.time
        }),
            segmentIndex = _Playlist$getMediaInf.segmentIndex,
            startTime = _Playlist$getMediaInf.startTime,
            _partIndex = _Playlist$getMediaInf.partIndex;

        next.getMediaInfoForTime = this.fetchAtBuffer_ ? "bufferedEnd " + bufferedEnd : "currentTime " + this.currentTime_();
        next.mediaIndex = segmentIndex;
        next.startOfSegment = startTime;
        next.partIndex = _partIndex;
      }

      var nextSegment = segments[next.mediaIndex];
      var nextPart = nextSegment && typeof next.partIndex === 'number' && nextSegment.parts && nextSegment.parts[next.partIndex]; // if the next segment index is invalid or
      // the next partIndex is invalid do not choose a next segment.

      if (!nextSegment || typeof next.partIndex === 'number' && !nextPart) {
        return null;
      } // if the next segment has parts, and we don't have a partIndex.
      // Set partIndex to 0

      if (typeof next.partIndex !== 'number' && nextSegment.parts) {
        next.partIndex = 0;
        nextPart = nextSegment.parts[0];
      } // if we have no buffered data then we need to make sure
      // that the next part we append is "independent" if possible.
      // So we check if the previous part is independent, and request
      // it if it is.

      if (!bufferedTime && nextPart && !nextPart.independent) {
        if (next.partIndex === 0) {
          var lastSegment = segments[next.mediaIndex - 1];
          var lastSegmentLastPart = lastSegment.parts && lastSegment.parts.length && lastSegment.parts[lastSegment.parts.length - 1];

          if (lastSegmentLastPart && lastSegmentLastPart.independent) {
            next.mediaIndex -= 1;
            next.partIndex = lastSegment.parts.length - 1;
            next.independent = 'previous segment';
          }
        } else if (nextSegment.parts[next.partIndex - 1].independent) {
          next.partIndex -= 1;
          next.independent = 'previous part';
        }
      }

      var ended = this.mediaSource_ && this.mediaSource_.readyState === 'ended'; // do not choose a next segment if all of the following:
      // 1. this is the last segment in the playlist
      // 2. end of stream has been called on the media source already
      // 3. the player is not seeking

      if (next.mediaIndex >= segments.length - 1 && ended && !this.seeking_()) {
        return null;
      }

      return this.generateSegmentInfo_(next);
    };
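    // Illustrative shape of the request object built above (values are
    // hypothetical, not from the source). For a low-latency playlist where
    // the loader is mid-segment, chooseNextRequest_ might resolve to:
    //
    //   {
    //     playlist: this.playlist_,
    //     mediaIndex: 12,       // same segment, next part
    //     partIndex: 3,
    //     startOfSegment: 42.0,
    //     isSyncRequest: false
    //   }
    //
    // `independent` is only added when the buffer is empty and the loader
    // backtracks to the nearest independent part, as the code above shows.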

    _proto.generateSegmentInfo_ = function generateSegmentInfo_(options) {
      var independent = options.independent,
          playlist = options.playlist,
          mediaIndex = options.mediaIndex,
          startOfSegment = options.startOfSegment,
          isSyncRequest = options.isSyncRequest,
          partIndex = options.partIndex,
          forceTimestampOffset = options.forceTimestampOffset,
          getMediaInfoForTime = options.getMediaInfoForTime;
      var segment = playlist.segments[mediaIndex];
      var part = typeof partIndex === 'number' && segment.parts[partIndex];
      var segmentInfo = {
        requestId: 'segment-loader-' + Math.random(),
        // resolve the segment URL relative to the playlist
        uri: part && part.resolvedUri || segment.resolvedUri,
        // the segment's mediaIndex at the time it was requested
        mediaIndex: mediaIndex,
        partIndex: part ? partIndex : null,
        // whether or not to update the SegmentLoader's state with this
        // segment's mediaIndex
        isSyncRequest: isSyncRequest,
        startOfSegment: startOfSegment,
        // the segment's playlist
        playlist: playlist,
        // unencrypted bytes of the segment
        bytes: null,
        // when a key is defined for this segment, the encrypted bytes
        encryptedBytes: null,
        // The target timestampOffset for this segment when we append it
        // to the source buffer
        timestampOffset: null,
        // The timeline that the segment is in
        timeline: segment.timeline,
        // The expected duration of the segment in seconds
        duration: part && part.duration || segment.duration,
        // retain the segment in case the playlist updates while doing an async process
        segment: segment,
        part: part,
        byteLength: 0,
        transmuxer: this.transmuxer_,
        // type of getMediaInfoForTime that was used to get this segment
        getMediaInfoForTime: getMediaInfoForTime,
        independent: independent
      };
      var overrideCheck = typeof forceTimestampOffset !== 'undefined' ? forceTimestampOffset : this.isPendingTimestampOffset_;
      segmentInfo.timestampOffset = this.timestampOffsetForSegment_({
        segmentTimeline: segment.timeline,
        currentTimeline: this.currentTimeline_,
        startOfSegment: startOfSegment,
        buffered: this.buffered_(),
        overrideCheck: overrideCheck
      });
      var audioBufferedEnd = lastBufferedEnd(this.sourceUpdater_.audioBuffered());

      if (typeof audioBufferedEnd === 'number') {
        // since the transmuxer is using the actual timing values, but the buffer is
        // adjusted by the timestamp offset, we must adjust the value here
        segmentInfo.audioAppendStart = audioBufferedEnd - this.sourceUpdater_.audioTimestampOffset();
      }

      if (this.sourceUpdater_.videoBuffered().length) {
        segmentInfo.gopsToAlignWith = gopsSafeToAlignWith(this.gopBuffer_, // since the transmuxer is using the actual timing values, but the time is
        // adjusted by the timestamp offset, we must adjust the value here
        this.currentTime_() - this.sourceUpdater_.videoTimestampOffset(), this.timeMapping_);
      }

      return segmentInfo;
    } // get the timestampoffset for a segment,
    // added so that vtt segment loader can override and prevent
    // adding timestamp offsets.
    ;

    _proto.timestampOffsetForSegment_ = function timestampOffsetForSegment_(options) {
      return timestampOffsetForSegment(options);
    }
    /**
     * Determines if the network has enough bandwidth to complete the current segment
     * request in a timely manner. If not, the request will be aborted early and bandwidth
     * updated to trigger a playlist switch.
     *
     * @param {Object} stats
     *        Object containing stats about the request timing and size
     * @private
     */
    ;

    _proto.earlyAbortWhenNeeded_ = function earlyAbortWhenNeeded_(stats) {
      if (this.vhs_.tech_.paused() || // Don't abort if the current playlist is on the lowestEnabledRendition
      // TODO: Replace using timeout with a boolean indicating whether this playlist is
      // the lowestEnabledRendition.
      !this.xhrOptions_.timeout || // Don't abort if we have no bandwidth information to estimate segment sizes
      !this.playlist_.attributes.BANDWIDTH) {
        return;
      } // Wait at least 1 second since the first byte of data has been received before
      // using the calculated bandwidth from the progress event to allow the bitrate
      // to stabilize

      if (Date.now() - (stats.firstBytesReceivedAt || Date.now()) < 1000) {
        return;
      }

      var currentTime = this.currentTime_();
      var measuredBandwidth = stats.bandwidth;
      var segmentDuration = this.pendingSegment_.duration;
      var requestTimeRemaining = Playlist.estimateSegmentRequestTime(segmentDuration, measuredBandwidth, this.playlist_, stats.bytesReceived); // Subtract 1 from the timeUntilRebuffer so we still consider an early abort
      // if we are only left with less than 1 second when the request completes.
      // A negative timeUntilRebuffering indicates we are already rebuffering

      var timeUntilRebuffer$1 = timeUntilRebuffer(this.buffered_(), currentTime, this.vhs_.tech_.playbackRate()) - 1; // Only consider aborting early if the estimated time to finish the download
      // is larger than the estimated time until the player runs out of forward buffer

      if (requestTimeRemaining <= timeUntilRebuffer$1) {
        return;
      }

      var switchCandidate = minRebufferMaxBandwidthSelector({
        master: this.vhs_.playlists.master,
        currentTime: currentTime,
        bandwidth: measuredBandwidth,
        duration: this.duration_(),
        segmentDuration: segmentDuration,
        timeUntilRebuffer: timeUntilRebuffer$1,
        currentTimeline: this.currentTimeline_,
        syncController: this.syncController_
      });

      if (!switchCandidate) {
        return;
      }

      var rebufferingImpact = requestTimeRemaining - timeUntilRebuffer$1;
      var timeSavedBySwitching = rebufferingImpact - switchCandidate.rebufferingImpact;
      var minimumTimeSaving = 0.5; // If we are already rebuffering, increase the amount of variance we add to the
      // potential round trip time of the new request so that we are not too aggressive
      // with switching to a playlist that might save us a fraction of a second.

      if (timeUntilRebuffer$1 <= TIME_FUDGE_FACTOR) {
        minimumTimeSaving = 1;
      }

      if (!switchCandidate.playlist || switchCandidate.playlist.uri === this.playlist_.uri || timeSavedBySwitching < minimumTimeSaving) {
        return;
      } // set the bandwidth to that of the desired playlist being sure to scale by
      // BANDWIDTH_VARIANCE and add one so the playlist selector does not exclude it
      // don't trigger a bandwidthupdate as the bandwidth is artificial

      this.bandwidth = switchCandidate.playlist.attributes.BANDWIDTH * Config.BANDWIDTH_VARIANCE + 1;
      this.trigger('earlyabort');
    };
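    // Worked numbers (illustrative, not from the source): with 4s of forward
    // buffer, timeUntilRebuffer$1 = 4 - 1 = 3s. If the in-flight request is
    // estimated to need 5 more seconds, rebufferingImpact = 5 - 3 = 2s. A
    // switch candidate whose own rebufferingImpact is 0.5s saves
    // 2 - 0.5 = 1.5s, which clears the 0.5s (or 1s while already rebuffering)
    // minimumTimeSaving threshold, so the loader aborts early and switches.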

    _proto.handleAbort_ = function handleAbort_(segmentInfo) {
      this.logger_("Aborting " + segmentInfoString(segmentInfo));
      this.mediaRequestsAborted += 1;
    }
    /**
     * XHR `progress` event handler
     *
     * @param {Event} event
     *        The XHR `progress` event
     * @param {Object} simpleSegment
     *        A simplified segment object copy
     * @private
     */
    ;

    _proto.handleProgress_ = function handleProgress_(event, simpleSegment) {
      this.earlyAbortWhenNeeded_(simpleSegment.stats);

      if (this.checkForAbort_(simpleSegment.requestId)) {
        return;
      }

      this.trigger('progress');
    };

    _proto.handleTrackInfo_ = function handleTrackInfo_(simpleSegment, trackInfo) {
      this.earlyAbortWhenNeeded_(simpleSegment.stats);

      if (this.checkForAbort_(simpleSegment.requestId)) {
        return;
      }

      if (this.checkForIllegalMediaSwitch(trackInfo)) {
        return;
      }

      trackInfo = trackInfo || {}; // When we have track info, determine what media types this loader is dealing with.
      // Guard against cases where we're not getting track info at all until we are
      // certain that all streams will provide it.

      if (!shallowEqual(this.currentMediaInfo_, trackInfo)) {
        this.appendInitSegment_ = {
          audio: true,
          video: true
        };
        this.startingMediaInfo_ = trackInfo;
        this.currentMediaInfo_ = trackInfo;
        this.logger_('trackinfo update', trackInfo);
        this.trigger('trackinfo');
      } // trackinfo may cause an abort if the trackinfo
      // causes a codec change to an unsupported codec.

      if (this.checkForAbort_(simpleSegment.requestId)) {
        return;
      } // set trackinfo on the pending segment so that
      // it can append.

      this.pendingSegment_.trackInfo = trackInfo; // check if any calls were waiting on the track info

      if (this.hasEnoughInfoToAppend_()) {
        this.processCallQueue_();
      }
    };

    _proto.handleTimingInfo_ = function handleTimingInfo_(simpleSegment, mediaType, timeType, time) {
      this.earlyAbortWhenNeeded_(simpleSegment.stats);

      if (this.checkForAbort_(simpleSegment.requestId)) {
        return;
      }

      var segmentInfo = this.pendingSegment_;
      var timingInfoProperty = timingInfoPropertyForMedia(mediaType);
      segmentInfo[timingInfoProperty] = segmentInfo[timingInfoProperty] || {};
      segmentInfo[timingInfoProperty][timeType] = time;
      this.logger_("timinginfo: " + mediaType + " - " + timeType + " - " + time); // check if any calls were waiting on the timing info

      if (this.hasEnoughInfoToAppend_()) {
        this.processCallQueue_();
      }
    };

    _proto.handleCaptions_ = function handleCaptions_(simpleSegment, captionData) {
      var _this2 = this;

      this.earlyAbortWhenNeeded_(simpleSegment.stats);

      if (this.checkForAbort_(simpleSegment.requestId)) {
        return;
      } // This could only happen with fmp4 segments, but
      // should still not happen in general

      if (captionData.length === 0) {
        this.logger_('SegmentLoader received no captions from a caption event');
        return;
      }

      var segmentInfo = this.pendingSegment_; // Wait until we have some video data so that caption timing
      // can be adjusted by the timestamp offset

      if (!segmentInfo.hasAppendedData_) {
        this.metadataQueue_.caption.push(this.handleCaptions_.bind(this, simpleSegment, captionData));
        return;
      }

      var timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset();
      var captionTracks = {}; // get total start/end and captions for each track/stream

      captionData.forEach(function (caption) {
        // caption.stream is actually a track name...
        // set to the existing values in tracks or default values
        captionTracks[caption.stream] = captionTracks[caption.stream] || {
          // Infinity, as any other value will be less than this
          startTime: Infinity,
          captions: [],
          // 0, as any other value will be more than this
          endTime: 0
        };
        var captionTrack = captionTracks[caption.stream];
        captionTrack.startTime = Math.min(captionTrack.startTime, caption.startTime + timestampOffset);
        captionTrack.endTime = Math.max(captionTrack.endTime, caption.endTime + timestampOffset);
        captionTrack.captions.push(caption);
      });
      Object.keys(captionTracks).forEach(function (trackName) {
        var _captionTracks$trackN = captionTracks[trackName],
            startTime = _captionTracks$trackN.startTime,
            endTime = _captionTracks$trackN.endTime,
            captions = _captionTracks$trackN.captions;
        var inbandTextTracks = _this2.inbandTextTracks_;

        _this2.logger_("adding cues from " + startTime + " -> " + endTime + " for " + trackName);

        createCaptionsTrackIfNotExists(inbandTextTracks, _this2.vhs_.tech_, trackName); // clear out any cues that start and end at the same time period for the same track.
        // We do this because a rendition change that also changes the timescale for captions
        // will result in captions being re-parsed for certain segments. If we add them again
        // without clearing we will have two of the same captions visible.

        removeCuesFromTrack(startTime, endTime, inbandTextTracks[trackName]);
        addCaptionData({
          captionArray: captions,
          inbandTextTracks: inbandTextTracks,
          timestampOffset: timestampOffset
        });
      }); // Reset stored captions since we added parsed
      // captions to a text track at this point

      if (this.transmuxer_) {
        this.transmuxer_.postMessage({
          action: 'clearParsedMp4Captions'
        });
      }
    };

    _proto.handleId3_ = function handleId3_(simpleSegment, id3Frames, dispatchType) {
      this.earlyAbortWhenNeeded_(simpleSegment.stats);

      if (this.checkForAbort_(simpleSegment.requestId)) {
        return;
      }

      var segmentInfo = this.pendingSegment_; // we need to have appended data in order for the timestamp offset to be set

      if (!segmentInfo.hasAppendedData_) {
        this.metadataQueue_.id3.push(this.handleId3_.bind(this, simpleSegment, id3Frames, dispatchType));
        return;
      }

      var timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset(); // There's potentially an issue where we could double add metadata if there's a muxed
      // audio/video source with a metadata track, and an alt audio with a metadata track.
      // However, this probably won't happen, and if it does it can be handled then.

      createMetadataTrackIfNotExists(this.inbandTextTracks_, dispatchType, this.vhs_.tech_);
      addMetadata({
        inbandTextTracks: this.inbandTextTracks_,
        metadataArray: id3Frames,
        timestampOffset: timestampOffset,
        videoDuration: this.duration_()
      });
    };

    _proto.processMetadataQueue_ = function processMetadataQueue_() {
      this.metadataQueue_.id3.forEach(function (fn) {
        return fn();
      });
      this.metadataQueue_.caption.forEach(function (fn) {
        return fn();
      });
      this.metadataQueue_.id3 = [];
      this.metadataQueue_.caption = [];
    };

    _proto.processCallQueue_ = function processCallQueue_() {
      var callQueue = this.callQueue_; // Clear out the queue before the queued functions are run, since some of the
      // functions may check the length of the call queue and default to pushing themselves
      // back onto the queue.

      this.callQueue_ = [];
      callQueue.forEach(function (fun) {
        return fun();
      });
    };

    _proto.processLoadQueue_ = function processLoadQueue_() {
      var loadQueue = this.loadQueue_; // Clear out the queue before the queued functions are run, since some of the
      // functions may check the length of the load queue and default to pushing themselves
      // back onto the queue.

      this.loadQueue_ = [];
      loadQueue.forEach(function (fun) {
        return fun();
      });
    }
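    // Illustrative note on the swap-then-run idiom above (not in the source):
    // taking a local reference and emptying the member first makes the drain
    // re-entrancy safe. If a drained function decides it still cannot proceed,
    // it re-queues itself onto the now-empty array instead of being run again
    // in the same pass:
    //
    //   var queue = this.loadQueue_;
    //   this.loadQueue_ = [];
    //   queue.forEach(function (fn) { fn(); }); // fn may push to this.loadQueue_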
23140 /**
23141 * Determines whether the loader has enough info to load the next segment.
23142 *
23143 * @return {boolean}
23144 * Whether or not the loader has enough info to load the next segment
23145 */
23146 ;
23147
23148 _proto.hasEnoughInfoToLoad_ = function hasEnoughInfoToLoad_() {
23149 // Since primary timing goes by video, only the audio loader potentially needs to wait
23150 // to load.
23151 if (this.loaderType_ !== 'audio') {
23152 return true;
23153 }
23154
23155 var segmentInfo = this.pendingSegment_; // A fill buffer must have already run to establish a pending segment before there's
23156 // enough info to load.
23157
23158 if (!segmentInfo) {
23159 return false;
23160 } // The first segment can and should be loaded immediately so that source buffers are
23161 // created together (before appending). Source buffer creation uses the presence of
23162 // audio and video data to determine whether to create audio/video source buffers, and
23163 // uses processed (transmuxed or parsed) media to determine the types required.
23164
23165
23166 if (!this.getCurrentMediaInfo_()) {
23167 return true;
23168 }
23169
23170 if ( // Technically, instead of waiting to load a segment on timeline changes, a segment
23171 // can be requested and downloaded and only wait before it is transmuxed or parsed.
23172 // But in practice, there are a few reasons why it is better to wait until a loader
23173 // is ready to append that segment before requesting and downloading:
23174 //
23175 // 1. Because audio and main loaders cross discontinuities together, if this loader
23176 // is waiting for the other to catch up, then instead of requesting another
23177 // segment and using up more bandwidth, by not yet loading, more bandwidth is
23178 // allotted to the loader currently behind.
23179 // 2. media-segment-request doesn't have to have logic to consider whether a segment
23180 // is ready to be processed or not, isolating the queueing behavior to the loader.
23181 // 3. The audio loader bases some of its segment properties on timing information
23182 // provided by the main loader, meaning that, if the logic for waiting on
23183 // processing was in media-segment-request, then it would also need to know how
23184 // to re-generate the segment information after the main loader caught up.
23185 shouldWaitForTimelineChange({
23186 timelineChangeController: this.timelineChangeController_,
23187 currentTimeline: this.currentTimeline_,
23188 segmentTimeline: segmentInfo.timeline,
23189 loaderType: this.loaderType_,
23190 audioDisabled: this.audioDisabled_
23191 })) {
23192 return false;
23193 }
23194
23195 return true;
23196 };
23197
23198 _proto.getCurrentMediaInfo_ = function getCurrentMediaInfo_(segmentInfo) {
23199 if (segmentInfo === void 0) {
23200 segmentInfo = this.pendingSegment_;
23201 }
23202
23203 return segmentInfo && segmentInfo.trackInfo || this.currentMediaInfo_;
23204 };
23205
23206 _proto.getMediaInfo_ = function getMediaInfo_(segmentInfo) {
23207 if (segmentInfo === void 0) {
23208 segmentInfo = this.pendingSegment_;
23209 }
23210
23211 return this.getCurrentMediaInfo_(segmentInfo) || this.startingMediaInfo_;
23212 };
23213
23214 _proto.getPendingSegmentPlaylist = function getPendingSegmentPlaylist() {
23215 return this.pendingSegment_ ? this.pendingSegment_.playlist : null;
23216 };
23217
23218 _proto.hasEnoughInfoToAppend_ = function hasEnoughInfoToAppend_() {
23219 if (!this.sourceUpdater_.ready()) {
23220 return false;
23221 } // If content needs to be removed or the loader is waiting on an append reattempt,
23222 // then no additional content should be appended until the prior append is resolved.
23223
23224
23225 if (this.waitingOnRemove_ || this.quotaExceededErrorRetryTimeout_) {
23226 return false;
23227 }
23228
23229 var segmentInfo = this.pendingSegment_;
23230 var trackInfo = this.getCurrentMediaInfo_(); // no segment to append any data for or
23231 // we do not have information on this specific
23232 // segment yet
23233
23234 if (!segmentInfo || !trackInfo) {
23235 return false;
23236 }
23237
23238 var hasAudio = trackInfo.hasAudio,
23239 hasVideo = trackInfo.hasVideo,
23240 isMuxed = trackInfo.isMuxed;
23241
23242 if (hasVideo && !segmentInfo.videoTimingInfo) {
23243 return false;
23244 } // muxed content only relies on video timing information for now.
23245
23246
23247 if (hasAudio && !this.audioDisabled_ && !isMuxed && !segmentInfo.audioTimingInfo) {
23248 return false;
23249 }
23250
23251 if (shouldWaitForTimelineChange({
23252 timelineChangeController: this.timelineChangeController_,
23253 currentTimeline: this.currentTimeline_,
23254 segmentTimeline: segmentInfo.timeline,
23255 loaderType: this.loaderType_,
23256 audioDisabled: this.audioDisabled_
23257 })) {
23258 return false;
23259 }
23260
23261 return true;
23262 };
23263
23264 _proto.handleData_ = function handleData_(simpleSegment, result) {
23265 this.earlyAbortWhenNeeded_(simpleSegment.stats);
23266
23267 if (this.checkForAbort_(simpleSegment.requestId)) {
23268 return;
23269 } // If there's anything in the call queue, then this data came later and should be
23270 // executed after the calls currently queued.
23271
23272
23273 if (this.callQueue_.length || !this.hasEnoughInfoToAppend_()) {
23274 this.callQueue_.push(this.handleData_.bind(this, simpleSegment, result));
23275 return;
23276 }
23277
23278 var segmentInfo = this.pendingSegment_; // update the time mapping so we can translate from display time to media time
23279
23280 this.setTimeMapping_(segmentInfo.timeline); // for tracking overall stats
23281
23282 this.updateMediaSecondsLoaded_(segmentInfo.part || segmentInfo.segment); // Note that the state isn't changed from loading to appending. This is because abort
23283 // logic may change behavior depending on the state, and changing state too early may
23284 // inflate our estimates of bandwidth. In the future this should be re-examined to
23285 // note more granular states.
23286 // don't process and append data if the mediaSource is closed
23287
23288 if (this.mediaSource_.readyState === 'closed') {
23289 return;
23290 } // if this request included an initialization segment, save that data
23291 // to the initSegment cache
23292
23293
23294 if (simpleSegment.map) {
23295 simpleSegment.map = this.initSegmentForMap(simpleSegment.map, true); // move over init segment properties to media request
23296
23297 segmentInfo.segment.map = simpleSegment.map;
23298 } // if this request included a segment key, save that data in the cache
23299
23300
23301 if (simpleSegment.key) {
23302 this.segmentKey(simpleSegment.key, true);
23303 }
23304
23305 segmentInfo.isFmp4 = simpleSegment.isFmp4;
23306 segmentInfo.timingInfo = segmentInfo.timingInfo || {};
23307
23308 if (segmentInfo.isFmp4) {
23309 this.trigger('fmp4');
23310 segmentInfo.timingInfo.start = segmentInfo[timingInfoPropertyForMedia(result.type)].start;
23311 } else {
23312 var trackInfo = this.getCurrentMediaInfo_();
23313 var useVideoTimingInfo = this.loaderType_ === 'main' && trackInfo && trackInfo.hasVideo;
23314 var firstVideoFrameTimeForData;
23315
23316 if (useVideoTimingInfo) {
23317 firstVideoFrameTimeForData = segmentInfo.videoTimingInfo.start;
23318 } // Segment loader knows more about segment timing than the transmuxer (in certain
23319 // aspects), so make any changes required for a more accurate start time.
23320 // Don't set the end time yet, as the segment may not be finished processing.
23321
23322
23323 segmentInfo.timingInfo.start = this.trueSegmentStart_({
23324 currentStart: segmentInfo.timingInfo.start,
23325 playlist: segmentInfo.playlist,
23326 mediaIndex: segmentInfo.mediaIndex,
23327 currentVideoTimestampOffset: this.sourceUpdater_.videoTimestampOffset(),
23328 useVideoTimingInfo: useVideoTimingInfo,
23329 firstVideoFrameTimeForData: firstVideoFrameTimeForData,
23330 videoTimingInfo: segmentInfo.videoTimingInfo,
23331 audioTimingInfo: segmentInfo.audioTimingInfo
23332 });
23333 } // Init segments for audio and video only need to be appended in certain cases. Now
23334 // that data is about to be appended, we can check the final cases to determine
23335 // whether we should append an init segment.
23336
23337
23338 this.updateAppendInitSegmentStatus(segmentInfo, result.type); // Timestamp offset should be updated once we get new data and have its timing info,
23339 // as we use the start of the segment to offset the best guess (playlist provided)
23340 // timestamp offset.
23341
23342 this.updateSourceBufferTimestampOffset_(segmentInfo); // if this is a sync request we need to determine whether it should
23343 // be appended or not.
23344
23345 if (segmentInfo.isSyncRequest) {
23346 // first save/update our timing info for this segment.
23347 // this is what allows us to choose an accurate segment
23348 // and the main reason we make a sync request.
23349 this.updateTimingInfoEnd_(segmentInfo);
23350 this.syncController_.saveSegmentTimingInfo({
23351 segmentInfo: segmentInfo,
23352 shouldSaveTimelineMapping: this.loaderType_ === 'main'
23353 });
23354 var next = this.chooseNextRequest_(); // If the sync request isn't the segment that would be requested next
23355 // after taking into account its timing info, do not append it.
23356
23357 if (next.mediaIndex !== segmentInfo.mediaIndex || next.partIndex !== segmentInfo.partIndex) {
23358 this.logger_('sync segment was incorrect, not appending');
23359 return;
23360 } // otherwise append it like any other segment as our guess was correct.
23361
23362
23363 this.logger_('sync segment was correct, appending');
23364 } // Save some state so that in the future anything waiting on first append (and/or
23365 // timestamp offset(s)) can process immediately. While the extra state isn't optimal,
23366 // we need some notion of whether the timestamp offset or other relevant information
23367 // has had a chance to be set.
23368
23369
23370 segmentInfo.hasAppendedData_ = true; // Now that the timestamp offset should be set, we can append any waiting ID3 tags.
23371
23372 this.processMetadataQueue_();
23373 this.appendData_(segmentInfo, result);
23374 };
23375
23376 _proto.updateAppendInitSegmentStatus = function updateAppendInitSegmentStatus(segmentInfo, type) {
23377 // alt audio doesn't manage timestamp offset
23378 if (this.loaderType_ === 'main' && typeof segmentInfo.timestampOffset === 'number' && // in the case that we're handling partial data, we don't want to append an init
23379 // segment for each chunk
23380 !segmentInfo.changedTimestampOffset) {
23381 // if the timestamp offset changed, the timeline may have changed, so we have to re-
23382 // append init segments
23383 this.appendInitSegment_ = {
23384 audio: true,
23385 video: true
23386 };
23387 }
23388
23389 if (this.playlistOfLastInitSegment_[type] !== segmentInfo.playlist) {
23390 // make sure we append init segment on playlist changes, in case the media config
23391 // changed
23392 this.appendInitSegment_[type] = true;
23393 }
23394 };
23395
23396 _proto.getInitSegmentAndUpdateState_ = function getInitSegmentAndUpdateState_(_ref4) {
23397 var type = _ref4.type,
23398 initSegment = _ref4.initSegment,
23399 map = _ref4.map,
23400 playlist = _ref4.playlist;
23401
23402 // "The EXT-X-MAP tag specifies how to obtain the Media Initialization Section
23403 // (Section 3) required to parse the applicable Media Segments. It applies to every
23404 // Media Segment that appears after it in the Playlist until the next EXT-X-MAP tag
23405 // or until the end of the playlist."
23406 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.2.5
23407 if (map) {
23408 var id = initSegmentId(map);
23409
23410 if (this.activeInitSegmentId_ === id) {
23411 // don't need to re-append the init segment if the ID matches
23412 return null;
23413 } // a map-specified init segment takes priority over any transmuxed (or otherwise
23414 // obtained) init segment
23415 //
23416 // this also caches the init segment for later use
23417
23418
23419 initSegment = this.initSegmentForMap(map, true).bytes;
23420 this.activeInitSegmentId_ = id;
23421 } // We used to always prepend init segments for video, however, that shouldn't be
23422 // necessary. Instead, we should only append on changes, similar to what we've always
23423 // done for audio. This matters more (though perhaps not much) for
23424 // frame-by-frame appending for LHLS, simply because of the increased quantity of
23425 // appends.
23426
23427
23428 if (initSegment && this.appendInitSegment_[type]) {
23429 // Make sure we track the playlist that we last used for the init segment, so that
23430 // we can re-append the init segment in the event that we get data from a new
23431 // playlist. Discontinuities and track changes are handled in other sections.
23432 this.playlistOfLastInitSegment_[type] = playlist; // Disable future init segment appends for this type until a change is necessary.
23433
23434 this.appendInitSegment_[type] = false; // we need to clear out the fmp4 active init segment id, since
23435 // we are appending the muxer init segment
23436
23437 this.activeInitSegmentId_ = null;
23438 return initSegment;
23439 }
23440
23441 return null;
23442 };
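// Decision summary for getInitSegmentAndUpdateState_ above:
// - same EXT-X-MAP init segment id as the last append: return null (no re-append);
// - a new map id: cache its bytes, record the id, then fall through;
// - appendInitSegment_[type] is set (timeline or playlist changed): return the init
//   segment bytes once and clear the flag;
// - otherwise: return null.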
23443
23444 _proto.handleQuotaExceededError_ = function handleQuotaExceededError_(_ref5, error) {
23445 var _this3 = this;
23446
23447 var segmentInfo = _ref5.segmentInfo,
23448 type = _ref5.type,
23449 bytes = _ref5.bytes;
23450 var audioBuffered = this.sourceUpdater_.audioBuffered();
23451 var videoBuffered = this.sourceUpdater_.videoBuffered(); // For now we're ignoring any notion of gaps in the buffer, but they, in theory,
23452 // should be cleared out during the buffer removals. However, log in case it helps
23453 // debug.
23454
23455 if (audioBuffered.length > 1) {
23456 this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the audio buffer: ' + timeRangesToArray(audioBuffered).join(', '));
23457 }
23458
23459 if (videoBuffered.length > 1) {
23460 this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the video buffer: ' + timeRangesToArray(videoBuffered).join(', '));
23461 }
23462
23463 var audioBufferStart = audioBuffered.length ? audioBuffered.start(0) : 0;
23464 var audioBufferEnd = audioBuffered.length ? audioBuffered.end(audioBuffered.length - 1) : 0;
23465 var videoBufferStart = videoBuffered.length ? videoBuffered.start(0) : 0;
23466 var videoBufferEnd = videoBuffered.length ? videoBuffered.end(videoBuffered.length - 1) : 0;
23467
23468 if (audioBufferEnd - audioBufferStart <= MIN_BACK_BUFFER && videoBufferEnd - videoBufferStart <= MIN_BACK_BUFFER) {
23469 // Can't remove enough buffer to make room for new segment (or the browser doesn't
23470 // allow for appends of segments this size). In the future, it may be possible to
23471 // split up the segment and append in pieces, but for now, error out this playlist
23472 // in an attempt to switch to a more manageable rendition.
23473 this.logger_('On QUOTA_EXCEEDED_ERR, single segment too large to append to ' + 'buffer, triggering an error. ' + ("Appended byte length: " + bytes.byteLength + ", ") + ("audio buffer: " + timeRangesToArray(audioBuffered).join(', ') + ", ") + ("video buffer: " + timeRangesToArray(videoBuffered).join(', ') + ", "));
23474 this.error({
23475 message: 'Quota exceeded error with append of a single segment of content',
23476 excludeUntil: Infinity
23477 });
23478 this.trigger('error');
23479 return;
23480 } // To try to resolve the quota exceeded error, clear back buffer and retry. This means
23481 // that the segment-loader should block on future events until this one is handled, so
23482 // that it doesn't keep moving onto further segments. Adding the call to the call
23483 // queue will prevent further appends until waitingOnRemove_ and
23484 // quotaExceededErrorRetryTimeout_ are cleared.
23485 //
23486 // Note that this will only block the current loader. In the case of demuxed content,
23487 // the other load may keep filling as fast as possible. In practice, this should be
23488 // OK, as it is a rare case when either audio has a high enough bitrate to fill up a
23489 // source buffer, or video fills without enough room for audio to append (and without
23490 // the availability of clearing out seconds of back buffer to make room for audio).
23491 // But it might still be good to handle this case in the future as a TODO.
23492
23493
23494 this.waitingOnRemove_ = true;
23495 this.callQueue_.push(this.appendToSourceBuffer_.bind(this, {
23496 segmentInfo: segmentInfo,
23497 type: type,
23498 bytes: bytes
23499 }));
23500 var currentTime = this.currentTime_(); // Try to remove as much audio and video as possible to make room for new content
23501 // before retrying.
23502
23503 var timeToRemoveUntil = currentTime - MIN_BACK_BUFFER;
23504 this.logger_("On QUOTA_EXCEEDED_ERR, removing audio/video from 0 to " + timeToRemoveUntil);
23505 this.remove(0, timeToRemoveUntil, function () {
23506 _this3.logger_("On QUOTA_EXCEEDED_ERR, retrying append in " + MIN_BACK_BUFFER + "s");
23507
23508 _this3.waitingOnRemove_ = false; // wait the length of time allotted to the back buffer to prevent wasted
23509 // attempts (since we can't clear less than the minimum)
23510
23511 _this3.quotaExceededErrorRetryTimeout_ = window.setTimeout(function () {
23512 _this3.logger_('On QUOTA_EXCEEDED_ERR, re-processing call queue');
23513
23514 _this3.quotaExceededErrorRetryTimeout_ = null;
23515
23516 _this3.processCallQueue_();
23517 }, MIN_BACK_BUFFER * 1000);
23518 }, true);
23519 };
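// In short, the recovery above is a gate-and-retry sequence:
// 1. gate the loader (waitingOnRemove_) and defer the failed append onto callQueue_;
// 2. remove [0, currentTime - MIN_BACK_BUFFER) from both source buffers;
// 3. wait MIN_BACK_BUFFER seconds so repeated failures can't busy-loop;
// 4. re-process the call queue, retrying the append with the same bytes.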
23520
23521 _proto.handleAppendError_ = function handleAppendError_(_ref6, error) {
23522 var segmentInfo = _ref6.segmentInfo,
23523 type = _ref6.type,
23524 bytes = _ref6.bytes;
23525
23526 // if there's no error, nothing to do
23527 if (!error) {
23528 return;
23529 }
23530
23531 if (error.code === QUOTA_EXCEEDED_ERR) {
23532 this.handleQuotaExceededError_({
23533 segmentInfo: segmentInfo,
23534 type: type,
23535 bytes: bytes
23536 }); // A quota exceeded error should be recoverable with a future re-append, so no need
23537 // to trigger an append error.
23538
23539 return;
23540 }
23541
23542 this.logger_('Received non QUOTA_EXCEEDED_ERR on append', error);
23543 this.error(type + " append of " + bytes.length + "b failed for segment " + ("#" + segmentInfo.mediaIndex + " in playlist " + segmentInfo.playlist.id)); // If an append errors, we often can't recover.
23544 // (see https://w3c.github.io/media-source/#sourcebuffer-append-error).
23545 //
23546 // Trigger a special error so that it can be handled separately from normal,
23547 // recoverable errors.
23548
23549 this.trigger('appenderror');
23550 };
23551
23552 _proto.appendToSourceBuffer_ = function appendToSourceBuffer_(_ref7) {
23553 var segmentInfo = _ref7.segmentInfo,
23554 type = _ref7.type,
23555 initSegment = _ref7.initSegment,
23556 data = _ref7.data,
23557 bytes = _ref7.bytes;
23558
23559 // If this is a re-append, bytes were already created and don't need to be recreated
23560 if (!bytes) {
23561 var segments = [data];
23562 var byteLength = data.byteLength;
23563
23564 if (initSegment) {
23565 // if the media initialization segment is changing, append it before the content
23566 // segment
23567 segments.unshift(initSegment);
23568 byteLength += initSegment.byteLength;
23569 } // Technically we should be OK appending the init segment separately, however, we
23570 // haven't yet tested that, and prepending is how we have always done things.
23571
23572
23573 bytes = concatSegments({
23574 bytes: byteLength,
23575 segments: segments
23576 });
23577 }
23578
23579 this.sourceUpdater_.appendBuffer({
23580 segmentInfo: segmentInfo,
23581 type: type,
23582 bytes: bytes
23583 }, this.handleAppendError_.bind(this, {
23584 segmentInfo: segmentInfo,
23585 type: type,
23586 bytes: bytes
23587 }));
23588 };
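// For reference, concatSegments (defined earlier in this bundle) packs the init
// segment and media data into one contiguous buffer. A minimal equivalent is
// sketched below for illustration only; it is not used by the library and the
// bundled implementation may differ in details.
var exampleConcatSegments = function (segmentObj) {
  var offset = 0; // running write position into the combined buffer
  var tempBuffer = new Uint8Array(segmentObj.bytes); // total byteLength of all pieces

  segmentObj.segments.forEach(function (segment) {
    tempBuffer.set(segment, offset); // copy each typed-array piece in order
    offset += segment.byteLength;
  });

  return tempBuffer;
};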
23589
23590 _proto.handleSegmentTimingInfo_ = function handleSegmentTimingInfo_(type, requestId, segmentTimingInfo) {
23591 if (!this.pendingSegment_ || requestId !== this.pendingSegment_.requestId) {
23592 return;
23593 }
23594
23595 var segment = this.pendingSegment_.segment;
23596 var timingInfoProperty = type + "TimingInfo";
23597
23598 if (!segment[timingInfoProperty]) {
23599 segment[timingInfoProperty] = {};
23600 }
23601
23602 segment[timingInfoProperty].transmuxerPrependedSeconds = segmentTimingInfo.prependedContentDuration || 0;
23603 segment[timingInfoProperty].transmuxedPresentationStart = segmentTimingInfo.start.presentation;
23604 segment[timingInfoProperty].transmuxedDecodeStart = segmentTimingInfo.start.decode;
23605 segment[timingInfoProperty].transmuxedPresentationEnd = segmentTimingInfo.end.presentation;
23606 segment[timingInfoProperty].transmuxedDecodeEnd = segmentTimingInfo.end.decode; // mainly used as a reference for debugging
23607
23608 segment[timingInfoProperty].baseMediaDecodeTime = segmentTimingInfo.baseMediaDecodeTime;
23609 };
23610
23611 _proto.appendData_ = function appendData_(segmentInfo, result) {
23612 var type = result.type,
23613 data = result.data;
23614
23615 if (!data || !data.byteLength) {
23616 return;
23617 }
23618
23619 if (type === 'audio' && this.audioDisabled_) {
23620 return;
23621 }
23622
23623 var initSegment = this.getInitSegmentAndUpdateState_({
23624 type: type,
23625 initSegment: result.initSegment,
23626 playlist: segmentInfo.playlist,
23627 map: segmentInfo.isFmp4 ? segmentInfo.segment.map : null
23628 });
23629 this.appendToSourceBuffer_({
23630 segmentInfo: segmentInfo,
23631 type: type,
23632 initSegment: initSegment,
23633 data: data
23634 });
23635 }
23636 /**
23637 * load a specific segment from a request into the buffer
23638 *
23639 * @private
23640 */
23641 ;
23642
23643 _proto.loadSegment_ = function loadSegment_(segmentInfo) {
23644 var _this4 = this;
23645
23646 this.state = 'WAITING';
23647 this.pendingSegment_ = segmentInfo;
23648 this.trimBackBuffer_(segmentInfo);
23649
23650 if (typeof segmentInfo.timestampOffset === 'number') {
23651 if (this.transmuxer_) {
23652 this.transmuxer_.postMessage({
23653 action: 'clearAllMp4Captions'
23654 });
23655 }
23656 }
23657
23658 if (!this.hasEnoughInfoToLoad_()) {
23659 this.loadQueue_.push(function () {
23660 // regenerate the audioAppendStart, timestampOffset, etc as they
23661 // may have changed since this function was added to the queue.
23662 var options = _extends_1({}, segmentInfo, {
23663 forceTimestampOffset: true
23664 });
23665
23666 _extends_1(segmentInfo, _this4.generateSegmentInfo_(options));
23667
23668 _this4.isPendingTimestampOffset_ = false;
23669
23670 _this4.updateTransmuxerAndRequestSegment_(segmentInfo);
23671 });
23672 return;
23673 }
23674
23675 this.updateTransmuxerAndRequestSegment_(segmentInfo);
23676 };
23677
23678 _proto.updateTransmuxerAndRequestSegment_ = function updateTransmuxerAndRequestSegment_(segmentInfo) {
23679 var _this5 = this;
23680
23681 // We'll update the source buffer's timestamp offset once we have transmuxed data, but
23682 // the transmuxer still needs to be updated before then.
23683 //
23684 // Even though keepOriginalTimestamps is set to true for the transmuxer, timestamp
23685 // offset must be passed to the transmuxer for stream correcting adjustments.
23686 if (this.shouldUpdateTransmuxerTimestampOffset_(segmentInfo.timestampOffset)) {
23687 this.gopBuffer_.length = 0; // gopsToAlignWith was set before the GOP buffer was cleared
23688
23689 segmentInfo.gopsToAlignWith = [];
23690 this.timeMapping_ = 0; // reset values in the transmuxer since a discontinuity should start fresh
23691
23692 this.transmuxer_.postMessage({
23693 action: 'reset'
23694 });
23695 this.transmuxer_.postMessage({
23696 action: 'setTimestampOffset',
23697 timestampOffset: segmentInfo.timestampOffset
23698 });
23699 }
23700
23701 var simpleSegment = this.createSimplifiedSegmentObj_(segmentInfo);
23702 var isEndOfStream = this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex);
23703 var isWalkingForward = this.mediaIndex !== null;
23704 var isDiscontinuity = segmentInfo.timeline !== this.currentTimeline_ && // currentTimeline starts at -1, so we shouldn't end the timeline switching to 0,
23705 // the first timeline
23706 segmentInfo.timeline > 0;
23707 var isEndOfTimeline = isEndOfStream || isWalkingForward && isDiscontinuity;
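// For example (illustrative): walking forward (mediaIndex !== null) from timeline 0
// into a segment on timeline 1 makes isDiscontinuity true, so isEndOfTimeline is
// true and the request pipeline can flush the transmuxer before the discontinuity.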
23708 this.logger_("Requesting " + segmentInfoString(segmentInfo)); // If there's an init segment associated with this segment, but it is not cached (identified by a lack of bytes),
23709 // then this init segment has never been seen before and should be appended.
23710 //
23711 // At this point the content type (audio/video or both) is not yet known, but it should be safe to set
23712 // both to true and leave the decision of whether to append the init segment to append time.
23713
23714 if (simpleSegment.map && !simpleSegment.map.bytes) {
23715 this.logger_('going to request init segment.');
23716 this.appendInitSegment_ = {
23717 video: true,
23718 audio: true
23719 };
23720 }
23721
23722 segmentInfo.abortRequests = mediaSegmentRequest({
23723 xhr: this.vhs_.xhr,
23724 xhrOptions: this.xhrOptions_,
23725 decryptionWorker: this.decrypter_,
23726 segment: simpleSegment,
23727 abortFn: this.handleAbort_.bind(this, segmentInfo),
23728 progressFn: this.handleProgress_.bind(this),
23729 trackInfoFn: this.handleTrackInfo_.bind(this),
23730 timingInfoFn: this.handleTimingInfo_.bind(this),
23731 videoSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'video', segmentInfo.requestId),
23732 audioSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'audio', segmentInfo.requestId),
23733 captionsFn: this.handleCaptions_.bind(this),
23734 isEndOfTimeline: isEndOfTimeline,
23735 endedTimelineFn: function endedTimelineFn() {
23736 _this5.logger_('received endedtimeline callback');
23737 },
23738 id3Fn: this.handleId3_.bind(this),
23739 dataFn: this.handleData_.bind(this),
23740 doneFn: this.segmentRequestFinished_.bind(this),
23741 onTransmuxerLog: function onTransmuxerLog(_ref8) {
23742 var message = _ref8.message,
23743 level = _ref8.level,
23744 stream = _ref8.stream;
23745
23746 _this5.logger_(segmentInfoString(segmentInfo) + " logged from transmuxer stream " + stream + " as a " + level + ": " + message);
23747 }
23748 });
23749 }
23750 /**
23751 * trim the back buffer so that we don't have too much data
23752 * in the source buffer
23753 *
23754 * @private
23755 *
23756 * @param {Object} segmentInfo - the current segment
23757 */
23758 ;
23759
23760 _proto.trimBackBuffer_ = function trimBackBuffer_(segmentInfo) {
23761 var removeToTime = safeBackBufferTrimTime(this.seekable_(), this.currentTime_(), this.playlist_.targetDuration || 10); // Chrome has a hard limit of 150MB of
23762 // buffer and a very conservative "garbage collector"
23763 // We manually clear out the old buffer to ensure
23764 // we don't trigger the QuotaExceeded error
23765 // on the source buffer during subsequent appends
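// Worked example (illustrative numbers, assuming the trim keeps roughly 30s of
// back buffer): with currentTime = 120 and targetDuration = 10, removeToTime would
// land near 90, so remove(0, 90) frees everything more than 30 seconds behind the
// playhead while leaving recent buffer available for small seeks backwards.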
23766
23767 if (removeToTime > 0) {
23768 this.remove(0, removeToTime);
23769 }
23770 }
23771 /**
23772 * created a simplified copy of the segment object with just the
23773 * information necessary to perform the XHR and decryption
23774 *
23775 * @private
23776 *
23777 * @param {Object} segmentInfo - the current segment
23778 * @return {Object} a simplified segment object copy
23779 */
23780 ;
23781
23782 _proto.createSimplifiedSegmentObj_ = function createSimplifiedSegmentObj_(segmentInfo) {
23783 var segment = segmentInfo.segment;
23784 var part = segmentInfo.part;
23785 var simpleSegment = {
23786 resolvedUri: part ? part.resolvedUri : segment.resolvedUri,
23787 byterange: part ? part.byterange : segment.byterange,
23788 requestId: segmentInfo.requestId,
23789 transmuxer: segmentInfo.transmuxer,
23790 audioAppendStart: segmentInfo.audioAppendStart,
23791 gopsToAlignWith: segmentInfo.gopsToAlignWith,
23792 part: segmentInfo.part
23793 };
23794 var previousSegment = segmentInfo.playlist.segments[segmentInfo.mediaIndex - 1];
23795
23796 if (previousSegment && previousSegment.timeline === segment.timeline) {
23797 // The baseStartTime of a segment is used to handle rollover when probing the TS
23798 // segment to retrieve timing information. Since the probe only looks at the media's
23799 // times (e.g., PTS and DTS values of the segment), and doesn't consider the
23800 // player's time (e.g., player.currentTime()), baseStartTime should reflect the
23801 // media time as well. transmuxedDecodeEnd represents the end time of a segment, in
23802 // seconds of media time, so should be used here. The previous segment is used since
23803 // the end of the previous segment should represent the beginning of the current
23804 // segment, so long as they are on the same timeline.
23805 if (previousSegment.videoTimingInfo) {
23806 simpleSegment.baseStartTime = previousSegment.videoTimingInfo.transmuxedDecodeEnd;
23807 } else if (previousSegment.audioTimingInfo) {
23808 simpleSegment.baseStartTime = previousSegment.audioTimingInfo.transmuxedDecodeEnd;
23809 }
23810 }
23811
23812 if (segment.key) {
23813 // if the media sequence is greater than 2^32, the IV will be incorrect
23814 // assuming 10s segments, that would be about 1300 years
23815 var iv = segment.key.iv || new Uint32Array([0, 0, 0, segmentInfo.mediaIndex + segmentInfo.playlist.mediaSequence]);
23816 simpleSegment.key = this.segmentKey(segment.key);
23817 simpleSegment.key.iv = iv;
23818 }
23819
23820 if (segment.map) {
23821 simpleSegment.map = this.initSegmentForMap(segment.map);
23822 }
23823
23824 return simpleSegment;
23825 };
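// Per the HLS spec, when a key carries no explicit IV the segment's media sequence
// number serves as a 128-bit big-endian IV. A standalone sketch of the default-IV
// construction used above (illustration only, not used by the library):
var exampleDefaultIv = function (mediaIndex, mediaSequence) {
  // the high 96 bits are zero; the media sequence number fills the low 32 bits
  return new Uint32Array([0, 0, 0, mediaIndex + mediaSequence]);
};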
23826
23827 _proto.saveTransferStats_ = function saveTransferStats_(stats) {
23828 // every request counts as a media request even if it has been aborted
23829 // or canceled due to a timeout
23830 this.mediaRequests += 1;
23831
23832 if (stats) {
23833 this.mediaBytesTransferred += stats.bytesReceived;
23834 this.mediaTransferDuration += stats.roundTripTime;
23835 }
23836 };
23837
23838 _proto.saveBandwidthRelatedStats_ = function saveBandwidthRelatedStats_(duration, stats) {
23839 // byteLength will be used for throughput, and should be based on bytes received,
23840 // which we only know at the end of the request and should reflect total bytes
23841 // downloaded rather than just bytes processed from components of the segment
23842 this.pendingSegment_.byteLength = stats.bytesReceived;
23843
23844 if (duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
23845 this.logger_("Ignoring segment's bandwidth because its duration of " + duration + (" is less than the min to record " + MIN_SEGMENT_DURATION_TO_SAVE_STATS));
23846 return;
23847 }
23848
23849 this.bandwidth = stats.bandwidth;
23850 this.roundTrip = stats.roundTripTime;
23851 };
23852
23853 _proto.handleTimeout_ = function handleTimeout_() {
23854 // although the VTT segment loader bandwidth isn't really used, it's good to
23855 // maintain consistent functionality between segment loaders
23856 this.mediaRequestsTimedout += 1;
23857 this.bandwidth = 1;
23858 this.roundTrip = NaN;
23859 this.trigger('bandwidthupdate');
23860 this.trigger('timeout');
23861 }
23862 /**
23863 * Handle the callback from the segmentRequest function and set the
23864 * associated SegmentLoader state and errors if necessary
23865 *
23866 * @private
23867 */
23868 ;
23869
23870 _proto.segmentRequestFinished_ = function segmentRequestFinished_(error, simpleSegment, result) {
23871 // TODO handle special cases, e.g., muxed audio/video but only audio in the segment
23872 // check the call queue directly since this function doesn't need to deal with any
23873 // data, and can continue even if the source buffers are not set up and we didn't get
23874 // any data from the segment
23875 if (this.callQueue_.length) {
23876 this.callQueue_.push(this.segmentRequestFinished_.bind(this, error, simpleSegment, result));
23877 return;
23878 }
23879
23880 this.saveTransferStats_(simpleSegment.stats); // The request was aborted and the SegmentLoader has already been reset
23881
23882 if (!this.pendingSegment_) {
23883 return;
23884 } // the request was aborted and the SegmentLoader has already started
23885 // another request. this can happen when the timeout for an aborted
23886 // request triggers due to a limitation in the XHR library.
23887 // Do not count this as any sort of request or we risk double-counting.
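// Note that saveTransferStats_ above runs even for stale requests, whereas the
// bandwidth and throughput stats recorded further below only apply once this
// requestId check confirms the response belongs to the in-flight pendingSegment_.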
23888
23889
23890 if (simpleSegment.requestId !== this.pendingSegment_.requestId) {
23891 return;
23892 } // an error occurred from the active pendingSegment_ so reset everything
23893
23894
23895 if (error) {
23896 this.pendingSegment_ = null;
23897 this.state = 'READY'; // aborts are not a true error condition and nothing corrective needs to be done
23898
23899 if (error.code === REQUEST_ERRORS.ABORTED) {
23900 return;
23901 }
23902
23903 this.pause(); // the error is really just that at least one of the requests timed-out
23904 // set the bandwidth to a very low value and trigger an ABR switch to
23905 // take emergency action
23906
23907 if (error.code === REQUEST_ERRORS.TIMEOUT) {
23908 this.handleTimeout_();
23909 return;
23910 } // if control-flow has arrived here, then the error is real
23911 // emit an error event to blacklist the current playlist
23912
23913
23914 this.mediaRequestsErrored += 1;
23915 this.error(error);
23916 this.trigger('error');
23917 return;
23918 }
23919
23920 var segmentInfo = this.pendingSegment_; // the response was a success so set any bandwidth stats the request
23921 // generated for ABR purposes
23922
23923 this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats);
23924 segmentInfo.endOfAllRequests = simpleSegment.endOfAllRequests;
23925
23926 if (result.gopInfo) {
23927 this.gopBuffer_ = updateGopBuffer(this.gopBuffer_, result.gopInfo, this.safeAppend_);
23928 } // Although we may have already started appending on progress, we shouldn't switch the
23929 // state away from loading until we are officially done loading the segment data.
23930
23931
23932 this.state = 'APPENDING'; // used for testing
23933
23934 this.trigger('appending');
23935 this.waitForAppendsToComplete_(segmentInfo);
23936 };
23937
23938 _proto.setTimeMapping_ = function setTimeMapping_(timeline) {
23939 var timelineMapping = this.syncController_.mappingForTimeline(timeline);
23940
23941 if (timelineMapping !== null) {
23942 this.timeMapping_ = timelineMapping;
23943 }
23944 };
23945
23946 _proto.updateMediaSecondsLoaded_ = function updateMediaSecondsLoaded_(segment) {
23947 if (typeof segment.start === 'number' && typeof segment.end === 'number') {
23948 this.mediaSecondsLoaded += segment.end - segment.start;
23949 } else {
23950 this.mediaSecondsLoaded += segment.duration;
23951 }
23952 };
23953
23954 _proto.shouldUpdateTransmuxerTimestampOffset_ = function shouldUpdateTransmuxerTimestampOffset_(timestampOffset) {
23955 if (timestampOffset === null) {
23956 return false;
23957 } // note that we're potentially using the same timestamp offset for both video and
23958 // audio
23959
23960
23961 if (this.loaderType_ === 'main' && timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
23962 return true;
23963 }
23964
23965 if (!this.audioDisabled_ && timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {
23966 return true;
23967 }
23968
23969 return false;
23970 };
23971
23972 _proto.trueSegmentStart_ = function trueSegmentStart_(_ref9) {
23973 var currentStart = _ref9.currentStart,
23974 playlist = _ref9.playlist,
23975 mediaIndex = _ref9.mediaIndex,
23976 firstVideoFrameTimeForData = _ref9.firstVideoFrameTimeForData,
23977 currentVideoTimestampOffset = _ref9.currentVideoTimestampOffset,
23978 useVideoTimingInfo = _ref9.useVideoTimingInfo,
23979 videoTimingInfo = _ref9.videoTimingInfo,
23980 audioTimingInfo = _ref9.audioTimingInfo;
23981
23982 if (typeof currentStart !== 'undefined') {
23983 // if start was set once, keep using it
23984 return currentStart;
23985 }
23986
23987 if (!useVideoTimingInfo) {
23988 return audioTimingInfo.start;
23989 }
23990
23991 var previousSegment = playlist.segments[mediaIndex - 1]; // The start of a segment should be the start of the first full frame contained
23992 // within that segment. Since the transmuxer maintains a cache of incomplete data
23993 // and/or the last frame seen, the start time may reflect a frame that starts
23994 // in the previous segment. Check for that case and ensure the start time is
23995 // accurate for the segment.
23996
23997 if (mediaIndex === 0 || !previousSegment || typeof previousSegment.start === 'undefined' || previousSegment.end !== firstVideoFrameTimeForData + currentVideoTimestampOffset) {
23998 return firstVideoFrameTimeForData;
23999 }
24000
24001 return videoTimingInfo.start;
24002 };
24003
24004 _proto.waitForAppendsToComplete_ = function waitForAppendsToComplete_(segmentInfo) {
24005 var trackInfo = this.getCurrentMediaInfo_(segmentInfo);
24006
24007 if (!trackInfo) {
24008 this.error({
24009 message: 'No starting media returned, likely due to an unsupported media format.',
24010 blacklistDuration: Infinity
24011 });
24012 this.trigger('error');
24013 return;
24014 } // Although transmuxing is done, appends may not yet be finished. Throw a marker
24015 // on each queue this loader is responsible for to ensure that the appends are
24016 // complete.
24017
24018
24019 var hasAudio = trackInfo.hasAudio,
24020 hasVideo = trackInfo.hasVideo,
24021 isMuxed = trackInfo.isMuxed;
24022 var waitForVideo = this.loaderType_ === 'main' && hasVideo;
24023 var waitForAudio = !this.audioDisabled_ && hasAudio && !isMuxed;
24024 segmentInfo.waitingOnAppends = 0; // segments with no data
24025
24026 if (!segmentInfo.hasAppendedData_) {
24027 if (!segmentInfo.timingInfo && typeof segmentInfo.timestampOffset === 'number') {
24028 // When there's no audio or video data in the segment, there's no audio or video
24029 // timing information.
24030 //
24031 // If there's no audio or video timing information, then the timestamp offset
24032 // can't be adjusted to the appropriate value for the transmuxer and source
24033 // buffers.
24034 //
24035 // Therefore, the next segment should be used to set the timestamp offset.
24036 this.isPendingTimestampOffset_ = true;
24037 } // override settings for metadata only segments
24038
24039
24040 segmentInfo.timingInfo = {
24041 start: 0
24042 };
24043 segmentInfo.waitingOnAppends++;
24044
24045 if (!this.isPendingTimestampOffset_) {
24046 // update the timestampoffset
24047 this.updateSourceBufferTimestampOffset_(segmentInfo); // make sure the metadata queue is processed even though we have
24048 // no video/audio data.
24049
24050 this.processMetadataQueue_();
24051 } // append is "done" instantly with no data.
24052
24053
24054 this.checkAppendsDone_(segmentInfo);
24055 return;
24056 } // Since source updater could call back synchronously, do the increments first.
24057
24058
24059 if (waitForVideo) {
24060 segmentInfo.waitingOnAppends++;
24061 }
24062
24063 if (waitForAudio) {
24064 segmentInfo.waitingOnAppends++;
24065 }
24066
24067 if (waitForVideo) {
24068 this.sourceUpdater_.videoQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
24069 }
24070
24071 if (waitForAudio) {
24072 this.sourceUpdater_.audioQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
24073 }
24074 };
24075
24076 _proto.checkAppendsDone_ = function checkAppendsDone_(segmentInfo) {
24077 if (this.checkForAbort_(segmentInfo.requestId)) {
24078 return;
24079 }
24080
24081 segmentInfo.waitingOnAppends--;
24082
24083 if (segmentInfo.waitingOnAppends === 0) {
24084 this.handleAppendsDone_();
24085 }
24086 };
24087
24088 _proto.checkForIllegalMediaSwitch = function checkForIllegalMediaSwitch(trackInfo) {
24089 var illegalMediaSwitchError = illegalMediaSwitch(this.loaderType_, this.getCurrentMediaInfo_(), trackInfo);
24090
24091 if (illegalMediaSwitchError) {
24092 this.error({
24093 message: illegalMediaSwitchError,
24094 blacklistDuration: Infinity
24095 });
24096 this.trigger('error');
24097 return true;
24098 }
24099
24100 return false;
24101 };
24102
24103 _proto.updateSourceBufferTimestampOffset_ = function updateSourceBufferTimestampOffset_(segmentInfo) {
24104 if (segmentInfo.timestampOffset === null || // we don't yet have the start for whatever media type (video or audio) has
24105 // priority, timing-wise, so we must wait
24106 typeof segmentInfo.timingInfo.start !== 'number' || // already updated the timestamp offset for this segment
24107 segmentInfo.changedTimestampOffset || // the alt audio loader should not be responsible for setting the timestamp offset
24108 this.loaderType_ !== 'main') {
24109 return;
24110 }
24111
24112 var didChange = false; // Primary timing goes by video, and audio is trimmed in the transmuxer, meaning that
24113 // the timing info here comes from video. In the event that the audio is longer than
24114 // the video, this will trim the start of the audio.
24115 // This also trims any offset from 0 at the beginning of the media
24116
24117 segmentInfo.timestampOffset -= this.getSegmentStartTimeForTimestampOffsetCalculation_({
24118 videoTimingInfo: segmentInfo.segment.videoTimingInfo,
24119 audioTimingInfo: segmentInfo.segment.audioTimingInfo,
24120 timingInfo: segmentInfo.timingInfo
24121 }); // In the event that there are part segment downloads, each will try to update the
24122 // timestamp offset. Retaining this bit of state prevents us from updating in the
24123 // future (within the same segment), however, there may be a better way to handle it.
24124
24125 segmentInfo.changedTimestampOffset = true;
24126
24127 if (segmentInfo.timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
24128 this.sourceUpdater_.videoTimestampOffset(segmentInfo.timestampOffset);
24129 didChange = true;
24130 }
24131
24132 if (segmentInfo.timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {
24133 this.sourceUpdater_.audioTimestampOffset(segmentInfo.timestampOffset);
24134 didChange = true;
24135 }
24136
24137 if (didChange) {
24138 this.trigger('timestampoffset');
24139 }
24140 };
24141
24142 _proto.getSegmentStartTimeForTimestampOffsetCalculation_ = function getSegmentStartTimeForTimestampOffsetCalculation_(_ref10) {
24143 var videoTimingInfo = _ref10.videoTimingInfo,
24144 audioTimingInfo = _ref10.audioTimingInfo,
24145 timingInfo = _ref10.timingInfo;
24146
24147 if (!this.useDtsForTimestampOffset_) {
24148 return timingInfo.start;
24149 }
24150
24151 if (videoTimingInfo && typeof videoTimingInfo.transmuxedDecodeStart === 'number') {
24152 return videoTimingInfo.transmuxedDecodeStart;
24153 } // handle audio only
24154
24155
24156 if (audioTimingInfo && typeof audioTimingInfo.transmuxedDecodeStart === 'number') {
24157 return audioTimingInfo.transmuxedDecodeStart;
24158 } // handle content not transmuxed (e.g., MP4)
24159
24160
24161 return timingInfo.start;
24162 };
24163
24164 _proto.updateTimingInfoEnd_ = function updateTimingInfoEnd_(segmentInfo) {
24165 segmentInfo.timingInfo = segmentInfo.timingInfo || {};
24166 var trackInfo = this.getMediaInfo_();
24167 var useVideoTimingInfo = this.loaderType_ === 'main' && trackInfo && trackInfo.hasVideo;
24168 var prioritizedTimingInfo = useVideoTimingInfo && segmentInfo.videoTimingInfo ? segmentInfo.videoTimingInfo : segmentInfo.audioTimingInfo;
24169
24170 if (!prioritizedTimingInfo) {
24171 return;
24172 }
24173
24174 segmentInfo.timingInfo.end = typeof prioritizedTimingInfo.end === 'number' ? // End time may not exist in a case where we aren't parsing the full segment (one
24175 // current example is the case of fmp4), so use the rough duration to calculate an
24176 // end time.
24177 prioritizedTimingInfo.end : prioritizedTimingInfo.start + segmentInfo.duration;
24178 }
24179 /**
24180 * callback to run when appendBuffer is finished. detects if we are
24181 * in a good state to do things with the data we got, or if we need
24182 * to wait for more
24183 *
24184 * @private
24185 */
24186 ;
24187
24188 _proto.handleAppendsDone_ = function handleAppendsDone_() {
24189 // appendsdone can cause an abort
24190 if (this.pendingSegment_) {
24191 this.trigger('appendsdone');
24192 }
24193
24194 if (!this.pendingSegment_) {
24195 this.state = 'READY'; // TODO should this move into this.checkForAbort to speed up requests post abort in
24196 // all appending cases?
24197
24198 if (!this.paused()) {
24199 this.monitorBuffer_();
24200 }
24201
24202 return;
24203 }
24204
24205 var segmentInfo = this.pendingSegment_; // Now that the end of the segment has been reached, we can set the end time. It's
24206 // best to wait until all appends are done so we're sure that the primary media is
24207 // finished (and we have its end time).
24208
24209 this.updateTimingInfoEnd_(segmentInfo);
24210
24211 if (this.shouldSaveSegmentTimingInfo_) {
24212 // Timeline mappings should only be saved for the main loader. This is for multiple
24213 // reasons:
24214 //
24215 // 1) Only one mapping is saved per timeline, meaning that if both the audio loader
24216 // and the main loader try to save the timeline mapping, whichever comes later
24217 // will overwrite the first. In theory this is OK, as the mappings should be the
24218 // same, however, it breaks for (2)
24219 // 2) In the event of a live stream, the initial live point will make for a somewhat
24220 // arbitrary mapping. If audio and video streams are not perfectly in-sync, then
24221 // the mapping will be off for one of the streams, dependent on which one was
24222 // first saved (see (1)).
24223 // 3) Primary timing goes by video in VHS, so the mapping should be video.
24224 //
24225 // Since the audio loader will wait for the main loader to load the first segment,
24226 // the main loader will save the first timeline mapping, and ensure that there won't
24227 // be a case where audio loads two segments without saving a mapping (thus leading
24228 // to missing segment timing info).
24229 this.syncController_.saveSegmentTimingInfo({
24230 segmentInfo: segmentInfo,
24231 shouldSaveTimelineMapping: this.loaderType_ === 'main'
24232 });
24233 }
24234
24235 var segmentDurationMessage = getTroublesomeSegmentDurationMessage(segmentInfo, this.sourceType_);
24236
24237 if (segmentDurationMessage) {
24238 if (segmentDurationMessage.severity === 'warn') {
24239 videojs__default["default"].log.warn(segmentDurationMessage.message);
24240 } else {
24241 this.logger_(segmentDurationMessage.message);
24242 }
24243 }
24244
24245 this.recordThroughput_(segmentInfo);
24246 this.pendingSegment_ = null;
24247 this.state = 'READY';
24248
24249 if (segmentInfo.isSyncRequest) {
24250 this.trigger('syncinfoupdate'); // if the sync request was not appended
24251 // then it was not the correct segment.
24252 // throw it away and use the data it gave us
24253 // to get the correct one.
24254
24255 if (!segmentInfo.hasAppendedData_) {
24256 this.logger_("Throwing away un-appended sync request " + segmentInfoString(segmentInfo));
24257 return;
24258 }
24259 }
24260
24261 this.logger_("Appended " + segmentInfoString(segmentInfo));
24262 this.addSegmentMetadataCue_(segmentInfo);
24263 this.fetchAtBuffer_ = true;
24264
24265 if (this.currentTimeline_ !== segmentInfo.timeline) {
24266 this.timelineChangeController_.lastTimelineChange({
24267 type: this.loaderType_,
24268 from: this.currentTimeline_,
24269 to: segmentInfo.timeline
24270 }); // If audio is not disabled, the main segment loader is responsible for updating
24271 // the audio timeline as well. If the content is video only, this won't have any
24272 // impact.
24273
24274 if (this.loaderType_ === 'main' && !this.audioDisabled_) {
24275 this.timelineChangeController_.lastTimelineChange({
24276 type: 'audio',
24277 from: this.currentTimeline_,
24278 to: segmentInfo.timeline
24279 });
24280 }
24281 }
24282
24283 this.currentTimeline_ = segmentInfo.timeline; // We must update the syncinfo to recalculate the seekable range before
24284 // the following conditional otherwise it may consider this a bad "guess"
24285 // and attempt to resync when the post-update seekable window and live
24286 // point would mean that this was the perfect segment to fetch
24287
24288 this.trigger('syncinfoupdate');
24289 var segment = segmentInfo.segment;
24290 var part = segmentInfo.part;
24291 var badSegmentGuess = segment.end && this.currentTime_() - segment.end > segmentInfo.playlist.targetDuration * 3;
24292 var badPartGuess = part && part.end && this.currentTime_() - part.end > segmentInfo.playlist.partTargetDuration * 3; // If we previously appended a segment/part that ends more than 3 part/targetDurations before
24293 // the currentTime_ that means that our conservative guess was too conservative.
24294 // In that case, reset the loader state so that we try to use any information gained
24295 // from the previous request to create a new, more accurate, sync-point.
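// Worked example (illustrative numbers): with targetDuration = 10, a segment whose
// appended end time is 30 while currentTime_() is 61 gives 61 - 30 = 31 > 30, so
// the guess is judged bad and the loader resyncs via resetEverything().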
24296
24297 if (badSegmentGuess || badPartGuess) {
24298 this.logger_("bad " + (badSegmentGuess ? 'segment' : 'part') + " " + segmentInfoString(segmentInfo));
24299 this.resetEverything();
24300 return;
24301 }
24302
24303 var isWalkingForward = this.mediaIndex !== null; // Don't do a rendition switch unless we have enough time to get a sync segment
24304 // and conservatively guess
24305
24306 if (isWalkingForward) {
24307 this.trigger('bandwidthupdate');
24308 }
24309
24310 this.trigger('progress');
24311 this.mediaIndex = segmentInfo.mediaIndex;
24312 this.partIndex = segmentInfo.partIndex; // any time an update finishes and the last segment is in the
24313 // buffer, end the stream. this ensures the "ended" event will
24314 // fire if playback reaches that point.
24315
24316 if (this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex)) {
24317 this.endOfStream();
24318 } // used for testing
24319
24320
24321 this.trigger('appended');
24322
24323 if (segmentInfo.hasAppendedData_) {
24324 this.mediaAppends++;
24325 }
24326
24327 if (!this.paused()) {
24328 this.monitorBuffer_();
24329 }
24330 }
24331 /**
24332 * Records the current throughput of the decrypt, transmux, and append
24333 * portion of the segment pipeline. `throughput.rate` is the cumulative
24334 * moving average of the throughput. `throughput.count` is the number of
24335 * data points in the average.
24336 *
24337 * @private
24338 * @param {Object} segmentInfo the object returned by loadSegment
24339 */
24340 ;
24341
24342 _proto.recordThroughput_ = function recordThroughput_(segmentInfo) {
24343 if (segmentInfo.duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
24344 this.logger_("Ignoring segment's throughput because its duration of " + segmentInfo.duration + (" is less than the min to record " + MIN_SEGMENT_DURATION_TO_SAVE_STATS));
24345 return;
24346 }
24347
24348 var rate = this.throughput.rate; // Add one to the time to ensure that we don't accidentally attempt to divide
24349 // by zero in the case where the throughput is ridiculously high
24350
24351 var segmentProcessingTime = Date.now() - segmentInfo.endOfAllRequests + 1; // Multiply by 8000 to convert from bytes/millisecond to bits/second
24352
24353 var segmentProcessingThroughput = Math.floor(segmentInfo.byteLength / segmentProcessingTime * 8 * 1000); // This is just a cumulative moving average calculation:
24354 // newAvg = oldAvg + (sample - oldAvg) / (sampleCount + 1)
24355
24356 this.throughput.rate += (segmentProcessingThroughput - rate) / ++this.throughput.count;
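// Worked example (illustrative numbers): with rate = 4e6 bits/s over count = 3
// samples, a new sample of 8e6 bits/s yields 4e6 + (8e6 - 4e6) / 4 = 5e6 bits/s
// with count = 4, matching the true mean of all four samples.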
24357 }
24358 /**
24359 * Adds a cue to the segment-metadata track with some metadata information about the
24360 * segment
24361 *
24362 * @private
24363 * @param {Object} segmentInfo
24364 * the object returned by loadSegment
24365 * @method addSegmentMetadataCue_
24366 */
24367 ;
24368
24369 _proto.addSegmentMetadataCue_ = function addSegmentMetadataCue_(segmentInfo) {
24370 if (!this.segmentMetadataTrack_) {
24371 return;
24372 }
24373
24374 var segment = segmentInfo.segment;
24375 var start = segment.start;
24376 var end = segment.end; // Do not try adding the cue if the start and end times are invalid.
24377
24378 if (!finite(start) || !finite(end)) {
24379 return;
24380 }
24381
24382 removeCuesFromTrack(start, end, this.segmentMetadataTrack_);
24383 var Cue = window.WebKitDataCue || window.VTTCue;
24384 var value = {
24385 custom: segment.custom,
24386 dateTimeObject: segment.dateTimeObject,
24387 dateTimeString: segment.dateTimeString,
24388 bandwidth: segmentInfo.playlist.attributes.BANDWIDTH,
24389 resolution: segmentInfo.playlist.attributes.RESOLUTION,
24390 codecs: segmentInfo.playlist.attributes.CODECS,
24391 byteLength: segmentInfo.byteLength,
24392 uri: segmentInfo.uri,
24393 timeline: segmentInfo.timeline,
24394 playlist: segmentInfo.playlist.id,
24395 start: start,
24396 end: end
24397 };
24398 var data = JSON.stringify(value);
24399 var cue = new Cue(start, end, data); // Attach the metadata to the value property of the cue to keep behavior consistent between
24400 // WebKitDataCue in Safari and VTTCue in other browsers
24401
24402 cue.value = value;
24403 this.segmentMetadataTrack_.addCue(cue);
24404 };
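// Downstream code can read these cues from the player's segment-metadata text
// track. A hypothetical consumer sketch (the track label and player handle are
// assumptions; this helper is not used by the library):
var exampleReadSegmentMetadata = function (player) {
  var tracks = player.textTracks();

  for (var i = 0; i < tracks.length; i++) {
    if (tracks[i].label === 'segment-metadata') {
      var cue = tracks[i].activeCues && tracks[i].activeCues[0];
      // cue.value holds the object built above (playlist id, bandwidth, timing, ...)
      return cue ? cue.value : null;
    }
  }

  return null;
};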
24405
24406 return SegmentLoader;
24407 }(videojs__default["default"].EventTarget);
24408
24409 function noop() {}
24410
24411 var toTitleCase = function toTitleCase(string) {
24412 if (typeof string !== 'string') {
24413 return string;
24414 }
24415
24416 return string.replace(/./, function (w) {
24417 return w.toUpperCase();
24418 });
24419 };
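// Only the first character is upper-cased, e.g. toTitleCase('video') === 'Video'.
// This is used below to build per-type handler names such as
// sourceUpdater['on' + titleType + 'UpdateEnd_'].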
24420
24421 var bufferTypes = ['video', 'audio'];
24422
24423 var _updating = function updating(type, sourceUpdater) {
24424 var sourceBuffer = sourceUpdater[type + "Buffer"];
24425 return sourceBuffer && sourceBuffer.updating || sourceUpdater.queuePending[type];
24426 };
24427
24428 var nextQueueIndexOfType = function nextQueueIndexOfType(type, queue) {
24429 for (var i = 0; i < queue.length; i++) {
24430 var queueEntry = queue[i];
24431
24432 if (queueEntry.type === 'mediaSource') {
24433 // If the next entry is a media source entry (uses multiple source buffers), block
24434 // processing to allow it to go through first.
24435 return null;
24436 }
24437
24438 if (queueEntry.type === type) {
24439 return i;
24440 }
24441 }
24442
24443 return null;
24444 };
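// For example (illustrative): with queued entry types ['audio', 'mediaSource',
// 'video'], nextQueueIndexOfType('audio', queue) === 0, while
// nextQueueIndexOfType('video', queue) === null, because the earlier mediaSource
// entry must process before any later per-type entries may run.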
24445
24446 var shiftQueue = function shiftQueue(type, sourceUpdater) {
24447 if (sourceUpdater.queue.length === 0) {
24448 return;
24449 }
24450
24451 var queueIndex = 0;
24452 var queueEntry = sourceUpdater.queue[queueIndex];
24453
24454 if (queueEntry.type === 'mediaSource') {
24455 if (!sourceUpdater.updating() && sourceUpdater.mediaSource.readyState !== 'closed') {
24456 sourceUpdater.queue.shift();
24457 queueEntry.action(sourceUpdater);
24458
24459 if (queueEntry.doneFn) {
24460 queueEntry.doneFn();
24461 } // Only specific source buffer actions must wait for async updateend events. Media
24462 // Source actions process synchronously. Therefore, both audio and video source
24463 // buffers are now clear to process the next queue entries.
24464
24465
24466 shiftQueue('audio', sourceUpdater);
24467 shiftQueue('video', sourceUpdater);
24468 } // Media Source actions require both source buffers, so if the media source action
24469 // couldn't process yet (because one or both source buffers are busy), block other
24470 // queue actions until both are available and the media source action can process.
24471
24472
24473 return;
24474 }
24475
24476 if (type === 'mediaSource') {
24477 // If the queue was shifted by a media source action (this happens when pushing a
24478 // media source action onto the queue), then it wasn't from an updateend event from an
24479 // audio or video source buffer, so there's no change from previous state, and no
24480 // processing should be done.
24481 return;
24482 } // Media source queue entries don't need to consider whether the source updater is
24483 // started (i.e., source buffers are created) as they don't need the source buffers, but
24484 // source buffer queue entries do.
24485
24486
24487 if (!sourceUpdater.ready() || sourceUpdater.mediaSource.readyState === 'closed' || _updating(type, sourceUpdater)) {
24488 return;
24489 }
24490
24491 if (queueEntry.type !== type) {
24492 queueIndex = nextQueueIndexOfType(type, sourceUpdater.queue);
24493
24494 if (queueIndex === null) {
24495 // Either there's no queue entry that uses this source buffer type in the queue, or
24496 // there's a media source queue entry before the next entry of this type, in which
24497 // case wait for that action to process first.
24498 return;
24499 }
24500
24501 queueEntry = sourceUpdater.queue[queueIndex];
24502 }
24503
24504 sourceUpdater.queue.splice(queueIndex, 1); // Keep a record that this source buffer type is in use.
24505 //
24506 // The queue pending operation must be set before the action is performed in the event
24507 // that the action results in a synchronous event that is acted upon. For instance, if
24508 // an exception is thrown that can be handled, it's possible that new actions will be
24509 // appended to an empty queue and immediately executed, but would not have the correct
24510 // pending information if this property was set after the action was performed.
24511
24512 sourceUpdater.queuePending[type] = queueEntry;
24513 queueEntry.action(type, sourceUpdater);
24514
24515 if (!queueEntry.doneFn) {
24516 // synchronous operation, process next entry
24517 sourceUpdater.queuePending[type] = null;
24518 shiftQueue(type, sourceUpdater);
24519 return;
24520 }
24521 };
24522
24523 var cleanupBuffer = function cleanupBuffer(type, sourceUpdater) {
24524 var buffer = sourceUpdater[type + "Buffer"];
24525 var titleType = toTitleCase(type);
24526
24527 if (!buffer) {
24528 return;
24529 }
24530
24531 buffer.removeEventListener('updateend', sourceUpdater["on" + titleType + "UpdateEnd_"]);
24532 buffer.removeEventListener('error', sourceUpdater["on" + titleType + "Error_"]);
24533 sourceUpdater.codecs[type] = null;
24534 sourceUpdater[type + "Buffer"] = null;
24535 };
24536
24537 var inSourceBuffers = function inSourceBuffers(mediaSource, sourceBuffer) {
24538 return mediaSource && sourceBuffer && Array.prototype.indexOf.call(mediaSource.sourceBuffers, sourceBuffer) !== -1;
24539 };
24540
24541 var actions = {
24542 appendBuffer: function appendBuffer(bytes, segmentInfo, onError) {
24543 return function (type, sourceUpdater) {
24544 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
24545 // or the media source does not contain this source buffer.
24546
24547 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
24548 return;
24549 }
24550
24551 sourceUpdater.logger_("Appending segment " + segmentInfo.mediaIndex + "'s " + bytes.length + " bytes to " + type + "Buffer");
24552
24553 try {
24554 sourceBuffer.appendBuffer(bytes);
24555 } catch (e) {
24556 sourceUpdater.logger_("Error with code " + e.code + " " + (e.code === QUOTA_EXCEEDED_ERR ? '(QUOTA_EXCEEDED_ERR) ' : '') + ("when appending segment " + segmentInfo.mediaIndex + " to " + type + "Buffer"));
24557 sourceUpdater.queuePending[type] = null;
24558 onError(e);
24559 }
24560 };
24561 },
24562 remove: function remove(start, end) {
24563 return function (type, sourceUpdater) {
24564 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
24565 // or the media source does not contain this source buffer.
24566
24567 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
24568 return;
24569 }
24570
24571 sourceUpdater.logger_("Removing " + start + " to " + end + " from " + type + "Buffer");
24572
24573 try {
24574 sourceBuffer.remove(start, end);
24575 } catch (e) {
24576 sourceUpdater.logger_("Remove " + start + " to " + end + " from " + type + "Buffer failed");
24577 }
24578 };
24579 },
24580 timestampOffset: function timestampOffset(offset) {
24581 return function (type, sourceUpdater) {
24582 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
24583 // or the media source does not contain this source buffer.
24584
24585 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
24586 return;
24587 }
24588
24589 sourceUpdater.logger_("Setting " + type + " timestampOffset to " + offset);
24590 sourceBuffer.timestampOffset = offset;
24591 };
24592 },
24593 callback: function callback(_callback) {
24594 return function (type, sourceUpdater) {
24595 _callback();
24596 };
24597 },
24598 endOfStream: function endOfStream(error) {
24599 return function (sourceUpdater) {
24600 if (sourceUpdater.mediaSource.readyState !== 'open') {
24601 return;
24602 }
24603
24604 sourceUpdater.logger_("Calling mediaSource endOfStream(" + (error || '') + ")");
24605
24606 try {
24607 sourceUpdater.mediaSource.endOfStream(error);
24608 } catch (e) {
24609 videojs__default["default"].log.warn('Failed to call media source endOfStream', e);
24610 }
24611 };
24612 },
24613 duration: function duration(_duration) {
24614 return function (sourceUpdater) {
24615 sourceUpdater.logger_("Setting mediaSource duration to " + _duration);
24616
24617 try {
24618 sourceUpdater.mediaSource.duration = _duration;
24619 } catch (e) {
24620 videojs__default["default"].log.warn('Failed to set media source duration', e);
24621 }
24622 };
24623 },
24624 abort: function abort() {
24625 return function (type, sourceUpdater) {
24626 if (sourceUpdater.mediaSource.readyState !== 'open') {
24627 return;
24628 }
24629
24630 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
24631 // or the media source does not contain this source buffer.
24632
24633 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
24634 return;
24635 }
24636
24637 sourceUpdater.logger_("Calling abort on " + type + "Buffer");
24638
24639 try {
24640 sourceBuffer.abort();
24641 } catch (e) {
24642 videojs__default["default"].log.warn("Failed to abort on " + type + "Buffer", e);
24643 }
24644 };
24645 },
24646 addSourceBuffer: function addSourceBuffer(type, codec) {
24647 return function (sourceUpdater) {
24648 var titleType = toTitleCase(type);
24649 var mime = getMimeForCodec(codec);
24650 sourceUpdater.logger_("Adding " + type + "Buffer with codec " + codec + " to mediaSource");
24651 var sourceBuffer = sourceUpdater.mediaSource.addSourceBuffer(mime);
24652 sourceBuffer.addEventListener('updateend', sourceUpdater["on" + titleType + "UpdateEnd_"]);
24653 sourceBuffer.addEventListener('error', sourceUpdater["on" + titleType + "Error_"]);
24654 sourceUpdater.codecs[type] = codec;
24655 sourceUpdater[type + "Buffer"] = sourceBuffer;
24656 };
24657 },
24658 removeSourceBuffer: function removeSourceBuffer(type) {
24659 return function (sourceUpdater) {
24660 var sourceBuffer = sourceUpdater[type + "Buffer"];
24661 cleanupBuffer(type, sourceUpdater); // can't do anything if the media source / source buffer is null
24662 // or the media source does not contain this source buffer.
24663
24664 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
24665 return;
24666 }
24667
24668 sourceUpdater.logger_("Removing " + type + "Buffer with codec " + sourceUpdater.codecs[type] + " from mediaSource");
24669
24670 try {
24671 sourceUpdater.mediaSource.removeSourceBuffer(sourceBuffer);
24672 } catch (e) {
24673 videojs__default["default"].log.warn("Failed to removeSourceBuffer " + type + "Buffer", e);
24674 }
24675 };
24676 },
24677 changeType: function changeType(codec) {
24678 return function (type, sourceUpdater) {
24679 var sourceBuffer = sourceUpdater[type + "Buffer"];
24680 var mime = getMimeForCodec(codec); // can't do anything if the media source / source buffer is null
24681 // or the media source does not contain this source buffer.
24682
24683 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
24684 return;
24685 } // do not update codec if we don't need to.
24686
24687
24688 if (sourceUpdater.codecs[type] === codec) {
24689 return;
24690 }
24691
24692 sourceUpdater.logger_("changing " + type + "Buffer codec from " + sourceUpdater.codecs[type] + " to " + codec);
24693 sourceBuffer.changeType(mime);
24694 sourceUpdater.codecs[type] = codec;
24695 };
24696 }
24697 };
24698
24699 var pushQueue = function pushQueue(_ref) {
24700 var type = _ref.type,
24701 sourceUpdater = _ref.sourceUpdater,
24702 action = _ref.action,
24703 doneFn = _ref.doneFn,
24704 name = _ref.name;
24705 sourceUpdater.queue.push({
24706 type: type,
24707 action: action,
24708 doneFn: doneFn,
24709 name: name
24710 });
24711 shiftQueue(type, sourceUpdater);
24712 };
24713
24714 var onUpdateend = function onUpdateend(type, sourceUpdater) {
24715 return function (e) {
24716 // Although there should, in theory, be a pending action for any updateend received,
24717 // some actions may trigger updateend events that are not clearly defined in
24718 // the w3c spec. For instance, setting the duration on the media source may trigger
24719 // updateend events on source buffers. This does not appear to be in the spec. As such,
24720 // if we encounter an updateend without a corresponding pending action from our queue
24721 // for that source buffer type, process the next action.
24722 if (sourceUpdater.queuePending[type]) {
24723 var doneFn = sourceUpdater.queuePending[type].doneFn;
24724 sourceUpdater.queuePending[type] = null;
24725
24726 if (doneFn) {
24727 // if there's an error, report it
24728 doneFn(sourceUpdater[type + "Error_"]);
24729 }
24730 }
24731
24732 shiftQueue(type, sourceUpdater);
24733 };
24734 };
24735 /**
24736 * A queue of callbacks to be serialized and applied when a
24737 * MediaSource and its associated SourceBuffers are not in the
24738 * updating state. It is used by the segment loader to update the
24739 * underlying SourceBuffers when new data is loaded, for instance.
24740 *
24741 * @class SourceUpdater
24742 * @param {MediaSource} mediaSource the MediaSource to create SourceBuffers on
24744 */
24745
24746
24747 var SourceUpdater = /*#__PURE__*/function (_videojs$EventTarget) {
24748 inheritsLoose(SourceUpdater, _videojs$EventTarget);
24749
24750 function SourceUpdater(mediaSource) {
24751 var _this;
24752
24753 _this = _videojs$EventTarget.call(this) || this;
24754 _this.mediaSource = mediaSource;
24755
24756 _this.sourceopenListener_ = function () {
24757 return shiftQueue('mediaSource', assertThisInitialized(_this));
24758 };
24759
24760 _this.mediaSource.addEventListener('sourceopen', _this.sourceopenListener_);
24761
24762 _this.logger_ = logger('SourceUpdater'); // initial timestamp offset is 0
24763
24764 _this.audioTimestampOffset_ = 0;
24765 _this.videoTimestampOffset_ = 0;
24766 _this.queue = [];
24767 _this.queuePending = {
24768 audio: null,
24769 video: null
24770 };
24771 _this.delayedAudioAppendQueue_ = [];
24772 _this.videoAppendQueued_ = false;
24773 _this.codecs = {};
24774 _this.onVideoUpdateEnd_ = onUpdateend('video', assertThisInitialized(_this));
24775 _this.onAudioUpdateEnd_ = onUpdateend('audio', assertThisInitialized(_this));
24776
24777 _this.onVideoError_ = function (e) {
24778 // used for debugging
24779 _this.videoError_ = e;
24780 };
24781
24782 _this.onAudioError_ = function (e) {
24783 // used for debugging
24784 _this.audioError_ = e;
24785 };
24786
24787 _this.createdSourceBuffers_ = false;
24788 _this.initializedEme_ = false;
24789 _this.triggeredReady_ = false;
24790 return _this;
24791 }
24792
24793 var _proto = SourceUpdater.prototype;
24794
24795 _proto.initializedEme = function initializedEme() {
24796 this.initializedEme_ = true;
24797 this.triggerReady();
24798 };
24799
24800 _proto.hasCreatedSourceBuffers = function hasCreatedSourceBuffers() {
24801 // if false, likely waiting on one of the segment loaders to get enough data to create
24802 // source buffers
24803 return this.createdSourceBuffers_;
24804 };
24805
24806 _proto.hasInitializedAnyEme = function hasInitializedAnyEme() {
24807 return this.initializedEme_;
24808 };
24809
24810 _proto.ready = function ready() {
24811 return this.hasCreatedSourceBuffers() && this.hasInitializedAnyEme();
24812 };
24813
24814 _proto.createSourceBuffers = function createSourceBuffers(codecs) {
24815 if (this.hasCreatedSourceBuffers()) {
24816 // already created them before
24817 return;
24818 } // the initial addOrChangeSourceBuffers will always be
24819 // two add buffers.
24820
24821
24822 this.addOrChangeSourceBuffers(codecs);
24823 this.createdSourceBuffers_ = true;
24824 this.trigger('createdsourcebuffers');
24825 this.triggerReady();
24826 };
24827
24828 _proto.triggerReady = function triggerReady() {
24829 // only allow ready to be triggered once, this prevents the case
24830 // where:
24831 // 1. we trigger createdsourcebuffers
24832 // 2. IE 11 synchronously initializes EME
24833 // 3. the synchronous initialization causes us to trigger ready
24834 // 4. We go back to the ready check in createSourceBuffers and ready is triggered again.
24835 if (this.ready() && !this.triggeredReady_) {
24836 this.triggeredReady_ = true;
24837 this.trigger('ready');
24838 }
24839 }
24840 /**
24841 * Add a type of source buffer to the media source.
24842 *
24843 * @param {string} type
24844 * The type of source buffer to add.
24845 *
24846 * @param {string} codec
24847 * The codec to add the source buffer with.
24848 */
24849 ;
24850
24851 _proto.addSourceBuffer = function addSourceBuffer(type, codec) {
24852 pushQueue({
24853 type: 'mediaSource',
24854 sourceUpdater: this,
24855 action: actions.addSourceBuffer(type, codec),
24856 name: 'addSourceBuffer'
24857 });
24858 }
24859 /**
24860 * call abort on a source buffer.
24861 *
24862 * @param {string} type
24863 * The type of source buffer to call abort on.
24864 */
24865 ;
24866
24867 _proto.abort = function abort(type) {
24868 pushQueue({
24869 type: type,
24870 sourceUpdater: this,
24871 action: actions.abort(type),
24872 name: 'abort'
24873 });
24874 }
24875 /**
24876 * Call removeSourceBuffer and remove a specific type
24877 * of source buffer on the mediaSource.
24878 *
24879 * @param {string} type
24880 * The type of source buffer to remove.
24881 */
24882 ;
24883
24884 _proto.removeSourceBuffer = function removeSourceBuffer(type) {
24885 if (!this.canRemoveSourceBuffer()) {
24886 videojs__default["default"].log.error('removeSourceBuffer is not supported!');
24887 return;
24888 }
24889
24890 pushQueue({
24891 type: 'mediaSource',
24892 sourceUpdater: this,
24893 action: actions.removeSourceBuffer(type),
24894 name: 'removeSourceBuffer'
24895 });
24896 }
24897 /**
24898 * Whether or not the removeSourceBuffer function is supported
24899 * on the mediaSource.
24900 *
24901 * @return {boolean}
24902 * True if removeSourceBuffer can be called.
24903 */
24904 ;
24905
24906 _proto.canRemoveSourceBuffer = function canRemoveSourceBuffer() {
24907 // IE reports that it supports removeSourceBuffer, but often throws
24908 // errors when attempting to use the function. So we report that it
24909 // does not support removeSourceBuffer. As of Firefox 83 removeSourceBuffer
24910 // throws errors, so we report that it does not support this as well.
24911 return !videojs__default["default"].browser.IE_VERSION && !videojs__default["default"].browser.IS_FIREFOX && window.MediaSource && window.MediaSource.prototype && typeof window.MediaSource.prototype.removeSourceBuffer === 'function';
24912 }
24913 /**
24914 * Whether or not the changeType function is supported
24915 * on our SourceBuffers.
24916 *
24917 * @return {boolean}
24918 * True if changeType can be called.
24919 */
24920 ;
24921
24922 SourceUpdater.canChangeType = function canChangeType() {
24923 return window.SourceBuffer && window.SourceBuffer.prototype && typeof window.SourceBuffer.prototype.changeType === 'function';
24924 }
24925 /**
24926 * Whether or not the changeType function is supported
24927 * on our SourceBuffers.
24928 *
24929 * @return {boolean}
24930 * True if changeType can be called.
24931 */
24932 ;
24933
24934 _proto.canChangeType = function canChangeType() {
24935 return this.constructor.canChangeType();
24936 }
24937 /**
24938 * Call the changeType function on a source buffer, given the code and type.
24939 *
24940 * @param {string} type
24941 * The type of source buffer to call changeType on.
24942 *
24943 * @param {string} codec
24944 * The codec string to change type with on the source buffer.
24945 */
24946 ;
24947
24948 _proto.changeType = function changeType(type, codec) {
24949 if (!this.canChangeType()) {
24950 videojs__default["default"].log.error('changeType is not supported!');
24951 return;
24952 }
24953
24954 pushQueue({
24955 type: type,
24956 sourceUpdater: this,
24957 action: actions.changeType(codec),
24958 name: 'changeType'
24959 });
24960 }
24961 /**
24962 * Add source buffers with a codec or, if they are already created,
24963 * call changeType on the existing source buffers.
24964 *
24965 * @param {Object} codecs
24966 * Codecs to switch to
24967 */
24968 ;
24969
24970 _proto.addOrChangeSourceBuffers = function addOrChangeSourceBuffers(codecs) {
24971 var _this2 = this;
24972
24973 if (!codecs || typeof codecs !== 'object' || Object.keys(codecs).length === 0) {
24974 throw new Error('Cannot addOrChangeSourceBuffers to undefined codecs');
24975 }
24976
24977 Object.keys(codecs).forEach(function (type) {
24978 var codec = codecs[type];
24979
24980 if (!_this2.hasCreatedSourceBuffers()) {
24981 return _this2.addSourceBuffer(type, codec);
24982 }
24983
24984 if (_this2.canChangeType()) {
24985 _this2.changeType(type, codec);
24986 }
24987 });
24988 }
24989 /**
24990 * Queue an update to append an ArrayBuffer.
24991 *
24992 * @param {Object} options object containing the buffer type, the bytes to append, and optional segmentInfo
24993 * @param {Function} doneFn the function to call when done
24994 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-appendBuffer-void-ArrayBuffer-data
24995 */
24996 ;
24997
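// Illustrative ordering sketch (hypothetical bytes/callbacks): when a video
// buffer exists but no video append has been queued yet, audio appends are
// parked in delayedAudioAppendQueue_ and replayed after the first video
// append is queued:
//
//   updater.appendBuffer({ type: 'audio', bytes: audioBytes }, onAudioDone); // delayed
//   updater.appendBuffer({ type: 'video', bytes: videoBytes }, onVideoDone); // queues video, then flushes the delayed audio append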
24998 _proto.appendBuffer = function appendBuffer(options, doneFn) {
24999 var _this3 = this;
25000
25001 var segmentInfo = options.segmentInfo,
25002 type = options.type,
25003 bytes = options.bytes;
25004 this.processedAppend_ = true;
25005
25006 if (type === 'audio' && this.videoBuffer && !this.videoAppendQueued_) {
25007 this.delayedAudioAppendQueue_.push([options, doneFn]);
25008 this.logger_("delayed audio append of " + bytes.length + " until video append");
25009 return;
25010 } // In the case of certain errors, for instance, QUOTA_EXCEEDED_ERR, updateend will
25011 // not be fired. This means that the queue will be blocked until the next action
25012 // taken by the segment-loader. Provide a mechanism for segment-loader to handle
25013 // these errors by calling the doneFn with the specific error.
25014
25015
25016 var onError = doneFn;
25017 pushQueue({
25018 type: type,
25019 sourceUpdater: this,
25020 action: actions.appendBuffer(bytes, segmentInfo || {
25021 mediaIndex: -1
25022 }, onError),
25023 doneFn: doneFn,
25024 name: 'appendBuffer'
25025 });
25026
25027 if (type === 'video') {
25028 this.videoAppendQueued_ = true;
25029
25030 if (!this.delayedAudioAppendQueue_.length) {
25031 return;
25032 }
25033
25034 var queue = this.delayedAudioAppendQueue_.slice();
25035 this.logger_("queuing delayed audio " + queue.length + " appendBuffers");
25036 this.delayedAudioAppendQueue_.length = 0;
25037 queue.forEach(function (que) {
25038 _this3.appendBuffer.apply(_this3, que);
25039 });
25040 }
25041 }
25042 /**
25043 * Get the audio buffer's buffered timerange.
25044 *
25045 * @return {TimeRange}
25046 * The audio buffer's buffered time range
25047 */
25048 ;
25049
25050 _proto.audioBuffered = function audioBuffered() {
25051 // no media source/source buffer, or the buffer isn't in the media
25052 // source's sourceBuffers list
25053 if (!inSourceBuffers(this.mediaSource, this.audioBuffer)) {
25054 return videojs__default["default"].createTimeRange();
25055 }
25056
25057 return this.audioBuffer.buffered ? this.audioBuffer.buffered : videojs__default["default"].createTimeRange();
25058 }
25059 /**
25060 * Get the video buffer's buffered timerange.
25061 *
25062 * @return {TimeRange}
25063 * The video buffer's buffered time range
25064 */
25065 ;
25066
25067 _proto.videoBuffered = function videoBuffered() {
25068 // no media source/source buffer, or the buffer isn't in the media
25069 // source's sourceBuffers list
25070 if (!inSourceBuffers(this.mediaSource, this.videoBuffer)) {
25071 return videojs__default["default"].createTimeRange();
25072 }
25073
25074 return this.videoBuffer.buffered ? this.videoBuffer.buffered : videojs__default["default"].createTimeRange();
25075 }
25076 /**
25077 * Get a combined video/audio buffer's buffered timerange.
25078 *
25079 * @return {TimeRange}
25080 * the combined time range
25081 */
25082 ;
25083
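// Worked example (illustrative): with audio buffered over [0, 10] and video
// buffered over [2, 8], the combined range reported below is the
// intersection [2, 8], since playback requires both kinds of media.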
25084 _proto.buffered = function buffered() {
25085 var video = inSourceBuffers(this.mediaSource, this.videoBuffer) ? this.videoBuffer : null;
25086 var audio = inSourceBuffers(this.mediaSource, this.audioBuffer) ? this.audioBuffer : null;
25087
25088 if (audio && !video) {
25089 return this.audioBuffered();
25090 }
25091
25092 if (video && !audio) {
25093 return this.videoBuffered();
25094 }
25095
25096 return bufferIntersection(this.audioBuffered(), this.videoBuffered());
25097 }
25098 /**
25099 * Add a callback to the queue that will set duration on the mediaSource.
25100 *
25101 * @param {number} duration
25102 * The duration to set
25103 *
25104 * @param {Function} [doneFn]
25105 * function to run after duration has been set.
25106 */
25107 ;
25108
25109 _proto.setDuration = function setDuration(duration, doneFn) {
25110 if (doneFn === void 0) {
25111 doneFn = noop;
25112 }
25113
25114 // In order to set the duration on the media source, it's necessary to wait for all
25115 // source buffers to no longer be updating. "If the updating attribute equals true on
25116 // any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
25117 // abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
25118 pushQueue({
25119 type: 'mediaSource',
25120 sourceUpdater: this,
25121 action: actions.duration(duration),
25122 name: 'duration',
25123 doneFn: doneFn
25124 });
25125 }
25126 /**
25127 * Add a mediaSource endOfStream call to the queue
25128 *
25129 * @param {Error} [error]
25130 * Call endOfStream with an error
25131 *
25132 * @param {Function} [doneFn]
25133 * A function that should be called when the
25134 * endOfStream call has finished.
25135 */
25136 ;
25137
25138 _proto.endOfStream = function endOfStream(error, doneFn) {
25139 if (error === void 0) {
25140 error = null;
25141 }
25142
25143 if (doneFn === void 0) {
25144 doneFn = noop;
25145 }
25146
25147 if (typeof error !== 'string') {
25148 error = undefined;
25149 } // In order to set the duration on the media source, it's necessary to wait for all
25150 // source buffers to no longer be updating. "If the updating attribute equals true on
25151 // any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
25152 // abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
25153
25154
25155 pushQueue({
25156 type: 'mediaSource',
25157 sourceUpdater: this,
25158 action: actions.endOfStream(error),
25159 name: 'endOfStream',
25160 doneFn: doneFn
25161 });
25162 }
25163 /**
25164 * Queue an update to remove a time range from the buffer.
25165 *
25166 * @param {number} start where to start the removal
25167 * @param {number} end where to end the removal
25168 * @param {Function} [done=noop] optional callback to be executed when the remove
25169 * operation is complete
25170 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
25171 */
25172 ;
25173
25174 _proto.removeAudio = function removeAudio(start, end, done) {
25175 if (done === void 0) {
25176 done = noop;
25177 }
25178
25179 if (!this.audioBuffered().length || this.audioBuffered().end(0) === 0) {
25180 done();
25181 return;
25182 }
25183
25184 pushQueue({
25185 type: 'audio',
25186 sourceUpdater: this,
25187 action: actions.remove(start, end),
25188 doneFn: done,
25189 name: 'remove'
25190 });
25191 }
25192 /**
25193 * Queue an update to remove a time range from the buffer.
25194 *
25195 * @param {number} start where to start the removal
25196 * @param {number} end where to end the removal
25197 * @param {Function} [done=noop] optional callback to be executed when the remove
25198 * operation is complete
25199 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
25200 */
25201 ;
25202
25203 _proto.removeVideo = function removeVideo(start, end, done) {
25204 if (done === void 0) {
25205 done = noop;
25206 }
25207
25208 if (!this.videoBuffered().length || this.videoBuffered().end(0) === 0) {
25209 done();
25210 return;
25211 }
25212
25213 pushQueue({
25214 type: 'video',
25215 sourceUpdater: this,
25216 action: actions.remove(start, end),
25217 doneFn: done,
25218 name: 'remove'
25219 });
25220 }
25221 /**
25222 * Whether the underlying sourceBuffer is updating or not
25223 *
25224 * @return {boolean} the updating status of the SourceBuffer
25225 */
25226 ;
25227
25228 _proto.updating = function updating() {
25229 // the audio/video source buffer is updating
25230 if (_updating('audio', this) || _updating('video', this)) {
25231 return true;
25232 }
25233
25234 return false;
25235 }
25236 /**
25237 * Set/get the timestampoffset on the audio SourceBuffer
25238 *
25239 * @return {number} the timestamp offset
25240 */
25241 ;
25242
25243 _proto.audioTimestampOffset = function audioTimestampOffset(offset) {
25244 if (typeof offset !== 'undefined' && this.audioBuffer && // no point in updating if it's the same
25245 this.audioTimestampOffset_ !== offset) {
25246 pushQueue({
25247 type: 'audio',
25248 sourceUpdater: this,
25249 action: actions.timestampOffset(offset),
25250 name: 'timestampOffset'
25251 });
25252 this.audioTimestampOffset_ = offset;
25253 }
25254
25255 return this.audioTimestampOffset_;
25256 }
25257 /**
25258 * Set/get the timestampoffset on the video SourceBuffer
25259 *
25260 * @return {number} the timestamp offset
25261 */
25262 ;
25263
25264 _proto.videoTimestampOffset = function videoTimestampOffset(offset) {
25265 if (typeof offset !== 'undefined' && this.videoBuffer && // no point in updating if it's the same
25266 this.videoTimestampOffset_ !== offset) {
25267 pushQueue({
25268 type: 'video',
25269 sourceUpdater: this,
25270 action: actions.timestampOffset(offset),
25271 name: 'timestampOffset'
25272 });
25273 this.videoTimestampOffset_ = offset;
25274 }
25275
25276 return this.videoTimestampOffset_;
25277 }
25278 /**
25279 * Add a function to the queue that will be called
25280 * when it is its turn to run in the audio queue.
25281 *
25282 * @param {Function} callback
25283 * The callback to queue.
25284 */
25285 ;
25286
25287 _proto.audioQueueCallback = function audioQueueCallback(callback) {
25288 if (!this.audioBuffer) {
25289 return;
25290 }
25291
25292 pushQueue({
25293 type: 'audio',
25294 sourceUpdater: this,
25295 action: actions.callback(callback),
25296 name: 'callback'
25297 });
25298 }
25299 /**
25300 * Add a function to the queue that will be called
25301 * when it is its turn to run in the video queue.
25302 *
25303 * @param {Function} callback
25304 * The callback to queue.
25305 */
25306 ;
25307
25308 _proto.videoQueueCallback = function videoQueueCallback(callback) {
25309 if (!this.videoBuffer) {
25310 return;
25311 }
25312
25313 pushQueue({
25314 type: 'video',
25315 sourceUpdater: this,
25316 action: actions.callback(callback),
25317 name: 'callback'
25318 });
25319 }
25320 /**
25321 * dispose of the source updater and the underlying sourceBuffer
25322 */
25323 ;
25324
25325 _proto.dispose = function dispose() {
25326 var _this4 = this;
25327
25328 this.trigger('dispose');
25329 bufferTypes.forEach(function (type) {
25330 _this4.abort(type);
25331
25332 if (_this4.canRemoveSourceBuffer()) {
25333 _this4.removeSourceBuffer(type);
25334 } else {
25335 _this4[type + "QueueCallback"](function () {
25336 return cleanupBuffer(type, _this4);
25337 });
25338 }
25339 });
25340 this.videoAppendQueued_ = false;
25341 this.delayedAudioAppendQueue_.length = 0;
25342
25343 if (this.sourceopenListener_) {
25344 this.mediaSource.removeEventListener('sourceopen', this.sourceopenListener_);
25345 }
25346
25347 this.off();
25348 };
25349
25350 return SourceUpdater;
25351 }(videojs__default["default"].EventTarget);
25352
25353 var getPrototypeOf = createCommonjsModule(function (module) {
25354 function _getPrototypeOf(o) {
25355 module.exports = _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) {
25356 return o.__proto__ || Object.getPrototypeOf(o);
25357 };
25358 module.exports["default"] = module.exports, module.exports.__esModule = true;
25359 return _getPrototypeOf(o);
25360 }
25361
25362 module.exports = _getPrototypeOf;
25363 module.exports["default"] = module.exports, module.exports.__esModule = true;
25364 });
25365
25366 var isNativeFunction = createCommonjsModule(function (module) {
25367 function _isNativeFunction(fn) {
25368 return Function.toString.call(fn).indexOf("[native code]") !== -1;
25369 }
25370
25371 module.exports = _isNativeFunction;
25372 module.exports["default"] = module.exports, module.exports.__esModule = true;
25373 });
25374
25375 var isNativeReflectConstruct = createCommonjsModule(function (module) {
25376 function _isNativeReflectConstruct() {
25377 if (typeof Reflect === "undefined" || !Reflect.construct) return false;
25378 if (Reflect.construct.sham) return false;
25379 if (typeof Proxy === "function") return true;
25380
25381 try {
25382 Boolean.prototype.valueOf.call(Reflect.construct(Boolean, [], function () {}));
25383 return true;
25384 } catch (e) {
25385 return false;
25386 }
25387 }
25388
25389 module.exports = _isNativeReflectConstruct;
25390 module.exports["default"] = module.exports, module.exports.__esModule = true;
25391 });
25392
25393 var construct = createCommonjsModule(function (module) {
25394 function _construct(Parent, args, Class) {
25395 if (isNativeReflectConstruct()) {
25396 module.exports = _construct = Reflect.construct;
25397 module.exports["default"] = module.exports, module.exports.__esModule = true;
25398 } else {
25399 module.exports = _construct = function _construct(Parent, args, Class) {
25400 var a = [null];
25401 a.push.apply(a, args);
25402 var Constructor = Function.bind.apply(Parent, a);
25403 var instance = new Constructor();
25404 if (Class) setPrototypeOf(instance, Class.prototype);
25405 return instance;
25406 };
25407
25408 module.exports["default"] = module.exports, module.exports.__esModule = true;
25409 }
25410
25411 return _construct.apply(null, arguments);
25412 }
25413
25414 module.exports = _construct;
25415 module.exports["default"] = module.exports, module.exports.__esModule = true;
25416 });
25417
25418 var wrapNativeSuper = createCommonjsModule(function (module) {
25419 function _wrapNativeSuper(Class) {
25420 var _cache = typeof Map === "function" ? new Map() : undefined;
25421
25422 module.exports = _wrapNativeSuper = function _wrapNativeSuper(Class) {
25423 if (Class === null || !isNativeFunction(Class)) return Class;
25424
25425 if (typeof Class !== "function") {
25426 throw new TypeError("Super expression must either be null or a function");
25427 }
25428
25429 if (typeof _cache !== "undefined") {
25430 if (_cache.has(Class)) return _cache.get(Class);
25431
25432 _cache.set(Class, Wrapper);
25433 }
25434
25435 function Wrapper() {
25436 return construct(Class, arguments, getPrototypeOf(this).constructor);
25437 }
25438
25439 Wrapper.prototype = Object.create(Class.prototype, {
25440 constructor: {
25441 value: Wrapper,
25442 enumerable: false,
25443 writable: true,
25444 configurable: true
25445 }
25446 });
25447 return setPrototypeOf(Wrapper, Class);
25448 };
25449
25450 module.exports["default"] = module.exports, module.exports.__esModule = true;
25451 return _wrapNativeSuper(Class);
25452 }
25453
25454 module.exports = _wrapNativeSuper;
25455 module.exports["default"] = module.exports, module.exports.__esModule = true;
25456 });
25457
25458 var uint8ToUtf8 = function uint8ToUtf8(uintArray) {
25459 return decodeURIComponent(escape(String.fromCharCode.apply(null, uintArray)));
25460 };
25461
25462 var VTT_LINE_TERMINATORS = new Uint8Array('\n\n'.split('').map(function (char) {
25463 return char.charCodeAt(0);
25464 }));
25465
25466 var NoVttJsError = /*#__PURE__*/function (_Error) {
25467 inheritsLoose(NoVttJsError, _Error);
25468
25469 function NoVttJsError() {
25470 return _Error.call(this, 'Trying to parse received VTT cues, but there is no WebVTT. Make sure vtt.js is loaded.') || this;
25471 }
25472
25473 return NoVttJsError;
25474 }( /*#__PURE__*/wrapNativeSuper(Error));
25475 /**
25476 * An object that manages segment loading and appending.
25477 *
25478 * @class VTTSegmentLoader
25479 * @param {Object} options required and optional options
25480 * @extends videojs.EventTarget
25481 */
25482
25483
25484 var VTTSegmentLoader = /*#__PURE__*/function (_SegmentLoader) {
25485 inheritsLoose(VTTSegmentLoader, _SegmentLoader);
25486
25487 function VTTSegmentLoader(settings, options) {
25488 var _this;
25489
25490 if (options === void 0) {
25491 options = {};
25492 }
25493
25494 _this = _SegmentLoader.call(this, settings, options) || this; // SegmentLoader requires a MediaSource be specified or it will throw an error;
25495 // however, VTTSegmentLoader has no need of a media source, so delete the reference
25496
25497 _this.mediaSource_ = null;
25498 _this.subtitlesTrack_ = null;
25499 _this.loaderType_ = 'subtitle';
25500 _this.featuresNativeTextTracks_ = settings.featuresNativeTextTracks;
25501 _this.loadVttJs = settings.loadVttJs; // The VTT segment will have its own time mappings. Saving VTT segment timing info in
25502 // the sync controller leads to improper behavior.
25503
25504 _this.shouldSaveSegmentTimingInfo_ = false;
25505 return _this;
25506 }
25507
25508 var _proto = VTTSegmentLoader.prototype;
25509
25510 _proto.createTransmuxer_ = function createTransmuxer_() {
25511 // don't need to transmux any subtitles
25512 return null;
25513 }
25514 /**
25515 * Indicates which time ranges are buffered
25516 *
25517 * @return {TimeRange}
25518 * TimeRange object representing the current buffered ranges
25519 */
25520 ;
25521
25522 _proto.buffered_ = function buffered_() {
25523 if (!this.subtitlesTrack_ || !this.subtitlesTrack_.cues || !this.subtitlesTrack_.cues.length) {
25524 return videojs__default["default"].createTimeRanges();
25525 }
25526
25527 var cues = this.subtitlesTrack_.cues;
25528 var start = cues[0].startTime;
25529 var end = cues[cues.length - 1].startTime;
25530 return videojs__default["default"].createTimeRanges([[start, end]]);
25531 }
25532 /**
25533 * Gets and sets init segment for the provided map
25534 *
25535 * @param {Object} map
25536 * The map object representing the init segment to get or set
25537 * @param {boolean=} set
25538 * If true, the init segment for the provided map should be saved
25539 * @return {Object}
25540 * map object for desired init segment
25541 */
25542 ;
25543
25544 _proto.initSegmentForMap = function initSegmentForMap(map, set) {
25545 if (set === void 0) {
25546 set = false;
25547 }
25548
25549 if (!map) {
25550 return null;
25551 }
25552
25553 var id = initSegmentId(map);
25554 var storedMap = this.initSegments_[id];
25555
25556 if (set && !storedMap && map.bytes) {
25557 // append WebVTT line terminators to the media initialization segment if it exists
25558 // to follow the WebVTT spec (https://w3c.github.io/webvtt/#file-structure) that
25559 // requires two or more WebVTT line terminators between the WebVTT header and the
25560 // rest of the file
25561 var combinedByteLength = VTT_LINE_TERMINATORS.byteLength + map.bytes.byteLength;
25562 var combinedSegment = new Uint8Array(combinedByteLength);
25563 combinedSegment.set(map.bytes);
25564 combinedSegment.set(VTT_LINE_TERMINATORS, map.bytes.byteLength);
25565 this.initSegments_[id] = storedMap = {
25566 resolvedUri: map.resolvedUri,
25567 byterange: map.byterange,
25568 bytes: combinedSegment
25569 };
25570 }
25571
25572 return storedMap || map;
25573 }
25574 /**
25575 * Returns true if all configuration required for loading is present, otherwise false.
25576 *
25577 * @return {boolean} True if all configuration required for loading is present
25578 * @private
25579 */
25580 ;
25581
25582 _proto.couldBeginLoading_ = function couldBeginLoading_() {
25583 return this.playlist_ && this.subtitlesTrack_ && !this.paused();
25584 }
25585 /**
25586 * Once all the starting parameters have been specified, begin
25587 * operation. This method should only be invoked from the INIT
25588 * state.
25589 *
25590 * @private
25591 */
25592 ;
25593
25594 _proto.init_ = function init_() {
25595 this.state = 'READY';
25596 this.resetEverything();
25597 return this.monitorBuffer_();
25598 }
25599 /**
25600 * Set a subtitle track on the segment loader to add subtitles to
25601 *
25602 * @param {TextTrack=} track
25603 * The text track to add loaded subtitles to
25604 * @return {TextTrack}
25605 * Returns the subtitles track
25606 */
25607 ;
25608
25609 _proto.track = function track(_track) {
25610 if (typeof _track === 'undefined') {
25611 return this.subtitlesTrack_;
25612 }
25613
25614 this.subtitlesTrack_ = _track; // if we were unpaused but waiting for a sourceUpdater, start
25615 // buffering now
25616
25617 if (this.state === 'INIT' && this.couldBeginLoading_()) {
25618 this.init_();
25619 }
25620
25621 return this.subtitlesTrack_;
25622 }
25623 /**
25624 * Remove any data in the source buffer between start and end times
25625 *
25626 * @param {number} start - the start time of the region to remove from the buffer
25627 * @param {number} end - the end time of the region to remove from the buffer
25628 */
25629 ;
25630
25631 _proto.remove = function remove(start, end) {
25632 removeCuesFromTrack(start, end, this.subtitlesTrack_);
25633 }
25634 /**
25635 * fill the buffer with segments unless the sourceBuffers are
25636 * currently updating
25637 *
25638 * Note: this function should only ever be called by monitorBuffer_
25639 * and never directly
25640 *
25641 * @private
25642 */
25643 ;
25644
25645 _proto.fillBuffer_ = function fillBuffer_() {
25646 var _this2 = this;
25647
25648 // see if we need to begin loading immediately
25649 var segmentInfo = this.chooseNextRequest_();
25650
25651 if (!segmentInfo) {
25652 return;
25653 }
25654
25655 if (this.syncController_.timestampOffsetForTimeline(segmentInfo.timeline) === null) {
25656 // We don't have the timestamp offset that we need to sync subtitles.
25657 // Rerun on a timestamp offset or user interaction.
25658 var checkTimestampOffset = function checkTimestampOffset() {
25659 _this2.state = 'READY';
25660
25661 if (!_this2.paused()) {
25662 // if not paused, queue a buffer check as soon as possible
25663 _this2.monitorBuffer_();
25664 }
25665 };
25666
25667 this.syncController_.one('timestampoffset', checkTimestampOffset);
25668 this.state = 'WAITING_ON_TIMELINE';
25669 return;
25670 }
25671
25672 this.loadSegment_(segmentInfo);
25673 } // never set a timestamp offset for vtt segments.
25674 ;
25675
25676 _proto.timestampOffsetForSegment_ = function timestampOffsetForSegment_() {
25677 return null;
25678 };
25679
25680 _proto.chooseNextRequest_ = function chooseNextRequest_() {
25681 return this.skipEmptySegments_(_SegmentLoader.prototype.chooseNextRequest_.call(this));
25682 }
25683 /**
25684 * Prevents the segment loader from requesting segments we know contain no subtitles
25685 * by walking forward until we find the next segment that is not known to be
25686 * empty.
25687 *
25688 * @param {Object} segmentInfo
25689 * a segment info object that describes the current segment
25690 * @return {Object}
25691 * a segment info object that describes the current segment
25692 */
25693 ;
25694
25695 _proto.skipEmptySegments_ = function skipEmptySegments_(segmentInfo) {
25696 while (segmentInfo && segmentInfo.segment.empty) {
25697 // stop at the last possible segmentInfo
25698 if (segmentInfo.mediaIndex + 1 >= segmentInfo.playlist.segments.length) {
25699 segmentInfo = null;
25700 break;
25701 }
25702
25703 segmentInfo = this.generateSegmentInfo_({
25704 playlist: segmentInfo.playlist,
25705 mediaIndex: segmentInfo.mediaIndex + 1,
25706 startOfSegment: segmentInfo.startOfSegment + segmentInfo.duration,
25707 isSyncRequest: segmentInfo.isSyncRequest
25708 });
25709 }
25710
25711 return segmentInfo;
25712 };
25713
25714 _proto.stopForError = function stopForError(error) {
25715 this.error(error);
25716 this.state = 'READY';
25717 this.pause();
25718 this.trigger('error');
25719 }
25720 /**
25721 * parse a completed (possibly decrypted) VTT segment and add its cues to the subtitles track
25722 *
25723 * @private
25724 */
25725 ;
25726
25727 _proto.segmentRequestFinished_ = function segmentRequestFinished_(error, simpleSegment, result) {
25728 var _this3 = this;
25729
25730 if (!this.subtitlesTrack_) {
25731 this.state = 'READY';
25732 return;
25733 }
25734
25735 this.saveTransferStats_(simpleSegment.stats); // the request was aborted
25736
25737 if (!this.pendingSegment_) {
25738 this.state = 'READY';
25739 this.mediaRequestsAborted += 1;
25740 return;
25741 }
25742
25743 if (error) {
25744 if (error.code === REQUEST_ERRORS.TIMEOUT) {
25745 this.handleTimeout_();
25746 }
25747
25748 if (error.code === REQUEST_ERRORS.ABORTED) {
25749 this.mediaRequestsAborted += 1;
25750 } else {
25751 this.mediaRequestsErrored += 1;
25752 }
25753
25754 this.stopForError(error);
25755 return;
25756 }
25757
25758 var segmentInfo = this.pendingSegment_; // although the VTT segment loader bandwidth isn't really used, it's good to
25759 // maintain functionality between segment loaders
25760
25761 this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats); // if this request included a segment key, save that data in the cache
25762
25763 if (simpleSegment.key) {
25764 this.segmentKey(simpleSegment.key, true);
25765 }
25766
25767 this.state = 'APPENDING'; // used for tests
25768
25769 this.trigger('appending');
25770 var segment = segmentInfo.segment;
25771
25772 if (segment.map) {
25773 segment.map.bytes = simpleSegment.map.bytes;
25774 }
25775
25776 segmentInfo.bytes = simpleSegment.bytes; // Make sure that vttjs has loaded; otherwise, load it and wait until it has finished loading
25777
25778 if (typeof window.WebVTT !== 'function' && typeof this.loadVttJs === 'function') {
25779 this.state = 'WAITING_ON_VTTJS'; // should be fine to call multiple times
25780 // script will be loaded once but multiple listeners will be added to the queue, which is expected.
25781
25782 this.loadVttJs().then(function () {
25783 return _this3.segmentRequestFinished_(error, simpleSegment, result);
25784 }, function () {
25785 return _this3.stopForError({
25786 message: 'Error loading vtt.js'
25787 });
25788 });
25789 return;
25790 }
25791
25792 segment.requested = true;
25793
25794 try {
25795 this.parseVTTCues_(segmentInfo);
25796 } catch (e) {
25797 this.stopForError({
25798 message: e.message
25799 });
25800 return;
25801 }
25802
25803 this.updateTimeMapping_(segmentInfo, this.syncController_.timelines[segmentInfo.timeline], this.playlist_);
25804
25805 if (segmentInfo.cues.length) {
25806 segmentInfo.timingInfo = {
25807 start: segmentInfo.cues[0].startTime,
25808 end: segmentInfo.cues[segmentInfo.cues.length - 1].endTime
25809 };
25810 } else {
25811 segmentInfo.timingInfo = {
25812 start: segmentInfo.startOfSegment,
25813 end: segmentInfo.startOfSegment + segmentInfo.duration
25814 };
25815 }
25816
25817 if (segmentInfo.isSyncRequest) {
25818 this.trigger('syncinfoupdate');
25819 this.pendingSegment_ = null;
25820 this.state = 'READY';
25821 return;
25822 }
25823
25824 segmentInfo.byteLength = segmentInfo.bytes.byteLength;
25825 this.mediaSecondsLoaded += segment.duration; // Create VTTCue instances for each cue in the new segment and add them to
25826 // the subtitle track
25827
25828 segmentInfo.cues.forEach(function (cue) {
25829 _this3.subtitlesTrack_.addCue(_this3.featuresNativeTextTracks_ ? new window.VTTCue(cue.startTime, cue.endTime, cue.text) : cue);
25830 }); // Remove any duplicate cues from the subtitle track. The WebVTT spec allows
25831 // cues to have identical time-intervals, but if the text is also identical
25832 // we can safely assume it is a duplicate that can be removed (ex. when a cue
25833 // "overlaps" VTT segments)
25834
25835 removeDuplicateCuesFromTrack(this.subtitlesTrack_);
25836 this.handleAppendsDone_();
25837 };
25838
25839 _proto.handleData_ = function handleData_() {// noop, as we shouldn't be getting video/audio data or captions
25840 // here; this loader does not support them.
25841 };
25842
25843 _proto.updateTimingInfoEnd_ = function updateTimingInfoEnd_() {// noop
25844 }
25845 /**
25846 * Uses the WebVTT parser to parse the segment response
25847 *
25848 * @throws NoVttJsError
25849 *
25850 * @param {Object} segmentInfo
25851 * a segment info object that describes the current segment
25852 * @private
25853 */
25854 ;
25855
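// For reference, a minimal WebVTT segment of the sort parsed below
// (illustrative, not taken from a real stream):
//
//   WEBVTT
//   X-TIMESTAMP-MAP=MPEGTS:900000,LOCAL:00:00:00.000
//
//   00:00:01.000 --> 00:00:03.000
//   Example cue text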
25856 _proto.parseVTTCues_ = function parseVTTCues_(segmentInfo) {
25857 var decoder;
25858 var decodeBytesToString = false;
25859
25860 if (typeof window.WebVTT !== 'function') {
25861 // caller is responsible for exception handling.
25862 throw new NoVttJsError();
25863 }
25864
25865 if (typeof window.TextDecoder === 'function') {
25866 decoder = new window.TextDecoder('utf8');
25867 } else {
25868 decoder = window.WebVTT.StringDecoder();
25869 decodeBytesToString = true;
25870 }
25871
25872 var parser = new window.WebVTT.Parser(window, window.vttjs, decoder);
25873 segmentInfo.cues = [];
25874 segmentInfo.timestampmap = {
25875 MPEGTS: 0,
25876 LOCAL: 0
25877 };
25878 parser.oncue = segmentInfo.cues.push.bind(segmentInfo.cues);
25879
25880 parser.ontimestampmap = function (map) {
25881 segmentInfo.timestampmap = map;
25882 };
25883
25884 parser.onparsingerror = function (error) {
25885 videojs__default["default"].log.warn('Error encountered when parsing cues: ' + error.message);
25886 };
25887
25888 if (segmentInfo.segment.map) {
25889 var mapData = segmentInfo.segment.map.bytes;
25890
25891 if (decodeBytesToString) {
25892 mapData = uint8ToUtf8(mapData);
25893 }
25894
25895 parser.parse(mapData);
25896 }
25897
25898 var segmentData = segmentInfo.bytes;
25899
25900 if (decodeBytesToString) {
25901 segmentData = uint8ToUtf8(segmentData);
25902 }
25903
25904 parser.parse(segmentData);
25905 parser.flush();
25906 }
25907 /**
25908 * Updates the start and end times of any cues parsed by the WebVTT parser using
25909 * the information parsed from the X-TIMESTAMP-MAP header and a TS to media time mapping
25910 * from the SyncController
25911 *
25912 * @param {Object} segmentInfo
25913 * a segment info object that describes the current segment
25914 * @param {Object} mappingObj
25915 * object containing a mapping from TS to media time
25916 * @param {Object} playlist
25917 * the playlist object containing the segment
25918 * @private
25919 */
25920 ;
25921
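// Worked example (illustrative, assuming clock.ONE_SECOND_IN_TS is 90000,
// the 90kHz MPEG-TS clock): with X-TIMESTAMP-MAP=MPEGTS:900000,LOCAL:0 and a
// timeline mapping of -10, diff = 900000 / 90000 - 0 + (-10) = 0, so cue
// times authored relative to LOCAL 0 land at display time 0.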
25922 _proto.updateTimeMapping_ = function updateTimeMapping_(segmentInfo, mappingObj, playlist) {
25923 var segment = segmentInfo.segment;
25924
25925 if (!mappingObj) {
25926 // If the sync controller does not have a mapping of TS to Media Time for the
25927 // timeline, then we don't have enough information to update the cue
25928 // start/end times
25929 return;
25930 }
25931
25932 if (!segmentInfo.cues.length) {
25933 // If there are no cues, we also do not have enough information to figure out
25934 // segment timing. Mark that the segment contains no cues so we don't re-request
25935 // an empty segment.
25936 segment.empty = true;
25937 return;
25938 }
25939
25940 var timestampmap = segmentInfo.timestampmap;
25941 var diff = timestampmap.MPEGTS / clock.ONE_SECOND_IN_TS - timestampmap.LOCAL + mappingObj.mapping;
25942 segmentInfo.cues.forEach(function (cue) {
25943 // First convert cue time to TS time using the timestamp-map provided within the vtt
25944 cue.startTime += diff;
25945 cue.endTime += diff;
25946 });
25947
25948 if (!playlist.syncInfo) {
25949 var firstStart = segmentInfo.cues[0].startTime;
25950 var lastStart = segmentInfo.cues[segmentInfo.cues.length - 1].startTime;
25951 playlist.syncInfo = {
25952 mediaSequence: playlist.mediaSequence + segmentInfo.mediaIndex,
25953 time: Math.min(firstStart, lastStart - segment.duration)
25954 };
25955 }
25956 };
25957
25958 return VTTSegmentLoader;
25959 }(SegmentLoader);
25960
25961 /**
25962 * @file ad-cue-tags.js
25963 */
25964
25965 /**
25966 * Searches for an ad cue that overlaps with the given mediaTime
25967 *
25968 * @param {Object} track
25969 * the track to find the cue for
25970 *
25971 * @param {number} mediaTime
25972 * the time to find the cue at
25973 *
25974 * @return {Object|null}
25975 * the found cue or null
25976 */
25977 var findAdCue = function findAdCue(track, mediaTime) {
25978 var cues = track.cues;
25979
25980 for (var i = 0; i < cues.length; i++) {
25981 var cue = cues[i];
25982
25983 if (mediaTime >= cue.adStartTime && mediaTime <= cue.adEndTime) {
25984 return cue;
25985 }
25986 }
25987
25988 return null;
25989 };
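// Illustrative timeline (hypothetical playlist): a segment carrying
// cueOut ("#EXT-X-CUE-OUT:30") opens an ad cue at that segment's start time,
// subsequent cueOutCont segments extend or re-enter it, and a segment
// carrying cueIn ("#EXT-X-CUE-IN") closes the cue at its own start time.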
25990 var updateAdCues = function updateAdCues(media, track, offset) {
25991 if (offset === void 0) {
25992 offset = 0;
25993 }
25994
25995 if (!media.segments) {
25996 return;
25997 }
25998
25999 var mediaTime = offset;
26000 var cue;
26001
26002 for (var i = 0; i < media.segments.length; i++) {
26003 var segment = media.segments[i];
26004
26005 if (!cue) {
26006 // Since the cues will span for at least the segment duration, adding a fudge
26007 // factor of half segment duration will prevent duplicate cues from being
26008 // created when timing info is not exact (e.g. cue start time initialized
26009 // at 10.006677, but next call mediaTime is 10.003332 )
26010 cue = findAdCue(track, mediaTime + segment.duration / 2);
26011 }
26012
26013 if (cue) {
26014 if ('cueIn' in segment) {
26015 // Found a CUE-IN so end the cue
26016 cue.endTime = mediaTime;
26017 cue.adEndTime = mediaTime;
26018 mediaTime += segment.duration;
26019 cue = null;
26020 continue;
26021 }
26022
26023 if (mediaTime < cue.endTime) {
26024 // Already processed this mediaTime for this cue
26025 mediaTime += segment.duration;
26026 continue;
26027 } // otherwise extend cue until a CUE-IN is found
26028
26029
26030 cue.endTime += segment.duration;
26031 } else {
26032 if ('cueOut' in segment) {
26033 cue = new window.VTTCue(mediaTime, mediaTime + segment.duration, segment.cueOut);
26034 cue.adStartTime = mediaTime; // Assumes tag format to be
26035 // #EXT-X-CUE-OUT:30
26036
26037 cue.adEndTime = mediaTime + parseFloat(segment.cueOut);
26038 track.addCue(cue);
26039 }
26040
26041 if ('cueOutCont' in segment) {
26042 // Entered into the middle of an ad cue
26043 // Assumes tag format to be
26044 // #EXT-X-CUE-OUT-CONT:10/30
26045 var _segment$cueOutCont$s = segment.cueOutCont.split('/').map(parseFloat),
26046 adOffset = _segment$cueOutCont$s[0],
26047 adTotal = _segment$cueOutCont$s[1];
26048
26049 cue = new window.VTTCue(mediaTime, mediaTime + segment.duration, '');
26050 cue.adStartTime = mediaTime - adOffset;
26051 cue.adEndTime = cue.adStartTime + adTotal;
26052 track.addCue(cue);
26053 }
26054 }
26055
26056 mediaTime += segment.duration;
26057 }
26058 };
26059
26060 // synchronize expired playlist segments.
26061 // the max media sequence diff is 48 hours of live stream
26062 // content with two-second segments: 48 * 3600 / 2 = 86400.
26063 // Anything larger than that will likely be invalid.
26064
26065 var MAX_MEDIA_SEQUENCE_DIFF_FOR_SYNC = 86400;
26066 var syncPointStrategies = [// Strategy "VOD": Handle the VOD-case where the sync-point is *always*
26067 // the equivalence display-time 0 === segment-index 0
26068 {
26069 name: 'VOD',
26070 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
26071 if (duration !== Infinity) {
26072 var syncPoint = {
26073 time: 0,
26074 segmentIndex: 0,
26075 partIndex: null
26076 };
26077 return syncPoint;
26078 }
26079
26080 return null;
26081 }
26082 }, // Strategy "ProgramDateTime": We have a program-date-time tag in this playlist
26083 {
26084 name: 'ProgramDateTime',
26085 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
26086 if (!Object.keys(syncController.timelineToDatetimeMappings).length) {
26087 return null;
26088 }
26089
26090 var syncPoint = null;
26091 var lastDistance = null;
26092 var partsAndSegments = getPartsAndSegments(playlist);
26093 currentTime = currentTime || 0;
26094
26095 for (var i = 0; i < partsAndSegments.length; i++) {
26096 // start from the end and loop backwards for live
26097 // or start from the front and loop forwards for non-live
26098 var index = playlist.endList || currentTime === 0 ? i : partsAndSegments.length - (i + 1);
26099 var partAndSegment = partsAndSegments[index];
26100 var segment = partAndSegment.segment;
26101 var datetimeMapping = syncController.timelineToDatetimeMappings[segment.timeline];
26102
26103 if (!datetimeMapping || !segment.dateTimeObject) {
26104 continue;
26105 }
26106
26107 var segmentTime = segment.dateTimeObject.getTime() / 1000;
26108 var start = segmentTime + datetimeMapping; // take part duration into account.
26109
26110 if (segment.parts && typeof partAndSegment.partIndex === 'number') {
26111 for (var z = 0; z < partAndSegment.partIndex; z++) {
26112 start += segment.parts[z].duration;
26113 }
26114 }
26115
26116 var distance = Math.abs(currentTime - start); // Once the distance begins to increase, or if distance is 0, we have passed
26117 // currentTime and can stop looking for better candidates
26118
26119 if (lastDistance !== null && (distance === 0 || lastDistance < distance)) {
26120 break;
26121 }
26122
26123 lastDistance = distance;
26124 syncPoint = {
26125 time: start,
26126 segmentIndex: partAndSegment.segmentIndex,
26127 partIndex: partAndSegment.partIndex
26128 };
26129 }
26130
26131 return syncPoint;
26132 }
26133 }, // Strategy "Segment": We have a known time mapping for a timeline and a
26134 // segment in the current timeline with timing data
26135 {
26136 name: 'Segment',
26137 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
26138 var syncPoint = null;
26139 var lastDistance = null;
26140 currentTime = currentTime || 0;
26141 var partsAndSegments = getPartsAndSegments(playlist);
26142
26143 for (var i = 0; i < partsAndSegments.length; i++) {
26144 // start from the end and loop backwards for live
26145 // or start from the front and loop forwards for non-live
26146 var index = playlist.endList || currentTime === 0 ? i : partsAndSegments.length - (i + 1);
26147 var partAndSegment = partsAndSegments[index];
26148 var segment = partAndSegment.segment;
26149 var start = partAndSegment.part && partAndSegment.part.start || segment && segment.start;
26150
26151 if (segment.timeline === currentTimeline && typeof start !== 'undefined') {
26152 var distance = Math.abs(currentTime - start); // Once the distance begins to increase, we have passed
26153 // currentTime and can stop looking for better candidates
26154
26155 if (lastDistance !== null && lastDistance < distance) {
26156 break;
26157 }
26158
26159 if (!syncPoint || lastDistance === null || lastDistance >= distance) {
26160 lastDistance = distance;
26161 syncPoint = {
26162 time: start,
26163 segmentIndex: partAndSegment.segmentIndex,
26164 partIndex: partAndSegment.partIndex
26165 };
26166 }
26167 }
26168 }
26169
26170 return syncPoint;
26171 }
26172 }, // Strategy "Discontinuity": We have a discontinuity with a known
26173 // display-time
26174 {
26175 name: 'Discontinuity',
26176 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
26177 var syncPoint = null;
26178 currentTime = currentTime || 0;
26179
26180 if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
26181 var lastDistance = null;
26182
26183 for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
26184 var segmentIndex = playlist.discontinuityStarts[i];
26185 var discontinuity = playlist.discontinuitySequence + i + 1;
26186 var discontinuitySync = syncController.discontinuities[discontinuity];
26187
26188 if (discontinuitySync) {
26189 var distance = Math.abs(currentTime - discontinuitySync.time); // Once the distance begins to increase, we have passed
26190 // currentTime and can stop looking for better candidates
26191
26192 if (lastDistance !== null && lastDistance < distance) {
26193 break;
26194 }
26195
26196 if (!syncPoint || lastDistance === null || lastDistance >= distance) {
26197 lastDistance = distance;
26198 syncPoint = {
26199 time: discontinuitySync.time,
26200 segmentIndex: segmentIndex,
26201 partIndex: null
26202 };
26203 }
26204 }
26205 }
26206 }
26207
26208 return syncPoint;
26209 }
26210 }, // Strategy "Playlist": We have a playlist with a known mapping of
26211 // segment index to display time
26212 {
26213 name: 'Playlist',
26214 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
26215 if (playlist.syncInfo) {
26216 var syncPoint = {
26217 time: playlist.syncInfo.time,
26218 segmentIndex: playlist.syncInfo.mediaSequence - playlist.mediaSequence,
26219 partIndex: null
26220 };
26221 return syncPoint;
26222 }
26223
26224 return null;
26225 }
26226 }];
26227
26228 var SyncController = /*#__PURE__*/function (_videojs$EventTarget) {
26229 inheritsLoose(SyncController, _videojs$EventTarget);
26230
26231 function SyncController(options) {
26232 var _this;
26233
26234 _this = _videojs$EventTarget.call(this) || this; // ...for synching across variants
26235
26236 _this.timelines = [];
26237 _this.discontinuities = [];
26238 _this.timelineToDatetimeMappings = {};
26239 _this.logger_ = logger('SyncController');
26240 return _this;
26241 }
26242 /**
26243 * Find a sync-point for the playlist specified
26244 *
26245 * A sync-point is defined as a known mapping from display-time to
26246 * a segment-index in the current playlist.
26247 *
26248 * @param {Playlist} playlist
26249 * The playlist that needs a sync-point
26250 * @param {number} duration
26251 * Duration of the MediaSource (Infinity if playing a live source)
26252 * @param {number} currentTimeline
26253 * The last timeline from which a segment was loaded
26254 * @return {Object}
26255 * A sync-point object
26256 */
26257
26258
26259 var _proto = SyncController.prototype;
26260
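// Illustrative call (hypothetical values):
//
//   var syncPoint = syncController.getSyncPoint(playlist, duration, timeline, currentTime);
//   // e.g. { time: 123.4, segmentIndex: 5, partIndex: null, strategy: 'Segment' },
//   // meaning display time 123.4 is a known mapping for segment index 5.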
26261 _proto.getSyncPoint = function getSyncPoint(playlist, duration, currentTimeline, currentTime) {
26262 var syncPoints = this.runStrategies_(playlist, duration, currentTimeline, currentTime);
26263
26264 if (!syncPoints.length) {
26265 // Signal that we need to attempt to get a sync-point manually
26266 // by fetching a segment in the playlist and constructing
26267 // a sync-point from that information
26268 return null;
26269 } // Now find the sync-point that is closest to the currentTime because
26270 // that should result in the most accurate guess about which segment
26271 // to fetch
26272
26273
26274 return this.selectSyncPoint_(syncPoints, {
26275 key: 'time',
26276 value: currentTime
26277 });
26278 }
26279 /**
26280 * Calculate the amount of time that has expired off the playlist during playback
26281 *
26282 * @param {Playlist} playlist
26283 * Playlist object to calculate expired from
26284 * @param {number} duration
26285 * Duration of the MediaSource (Infinity if playing a live source)
26286 * @return {number|null}
26287 * The amount of time that has expired off the playlist during playback. Null
26288 * if no sync-points for the playlist can be found.
26289 */
26290 ;
26291
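// Roughly: pick the sync-point nearest segment index 0, then use the summed
// segment durations between that index and index 0 to estimate how much
// media has already expired off the front of the playlist.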
26292 _proto.getExpiredTime = function getExpiredTime(playlist, duration) {
26293 if (!playlist || !playlist.segments) {
26294 return null;
26295 }
26296
26297 var syncPoints = this.runStrategies_(playlist, duration, playlist.discontinuitySequence, 0); // Without sync-points, there is not enough information to determine the expired time
26298
26299 if (!syncPoints.length) {
26300 return null;
26301 }
26302
26303 var syncPoint = this.selectSyncPoint_(syncPoints, {
26304 key: 'segmentIndex',
26305 value: 0
26306 }); // If the sync-point is beyond the start of the playlist, we want to subtract the
26307 // duration from index 0 to syncPoint.segmentIndex instead of adding.
26308
26309 if (syncPoint.segmentIndex > 0) {
26310 syncPoint.time *= -1;
26311 }
26312
26313 return Math.abs(syncPoint.time + sumDurations({
26314 defaultDuration: playlist.targetDuration,
26315 durationList: playlist.segments,
26316 startIndex: syncPoint.segmentIndex,
26317 endIndex: 0
26318 }));
26319 }
26320 /**
26321 * Runs each sync-point strategy and returns a list of sync-points returned by the
26322 * strategies
26323 *
26324 * @private
26325 * @param {Playlist} playlist
26326 * The playlist that needs a sync-point
26327 * @param {number} duration
26328 * Duration of the MediaSource (Infinity if playing a live source)
26329 * @param {number} currentTimeline
26330 * The last timeline from which a segment was loaded
26331 * @return {Array}
26332 * A list of sync-point objects
26333 */
26334 ;
26335
26336 _proto.runStrategies_ = function runStrategies_(playlist, duration, currentTimeline, currentTime) {
26337 var syncPoints = []; // Try to find a sync-point by utilizing various strategies...
26338
26339 for (var i = 0; i < syncPointStrategies.length; i++) {
26340 var strategy = syncPointStrategies[i];
26341 var syncPoint = strategy.run(this, playlist, duration, currentTimeline, currentTime);
26342
26343 if (syncPoint) {
26344 syncPoint.strategy = strategy.name;
26345 syncPoints.push({
26346 strategy: strategy.name,
26347 syncPoint: syncPoint
26348 });
26349 }
26350 }
26351
26352 return syncPoints;
26353 }
26354 /**
26355 * Selects the sync-point nearest the specified target
26356 *
26357 * @private
26358 * @param {Array} syncPoints
26359 * List of sync-points to select from
26360 * @param {Object} target
26361 * Object specifying the property and value we are targeting
26362 * @param {string} target.key
26363 * Specifies the property to target. Must be either 'time' or 'segmentIndex'
26364 * @param {number} target.value
26365 * The value to target for the specified key.
26366 * @return {Object}
26367 * The sync-point nearest the target
26368 */
26369 ;
26370
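// Worked example (illustrative): targeting { key: 'time', value: 30 },
// candidate sync-points at time 10 and time 28 have distances 20 and 2,
// so the sync-point at time 28 is chosen.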
26371 _proto.selectSyncPoint_ = function selectSyncPoint_(syncPoints, target) {
26372 var bestSyncPoint = syncPoints[0].syncPoint;
26373 var bestDistance = Math.abs(syncPoints[0].syncPoint[target.key] - target.value);
26374 var bestStrategy = syncPoints[0].strategy;
26375
26376 for (var i = 1; i < syncPoints.length; i++) {
26377 var newDistance = Math.abs(syncPoints[i].syncPoint[target.key] - target.value);
26378
26379 if (newDistance < bestDistance) {
26380 bestDistance = newDistance;
26381 bestSyncPoint = syncPoints[i].syncPoint;
26382 bestStrategy = syncPoints[i].strategy;
26383 }
26384 }
26385
26386 this.logger_("syncPoint for [" + target.key + ": " + target.value + "] chosen with strategy" + (" [" + bestStrategy + "]: [time:" + bestSyncPoint.time + ",") + (" segmentIndex:" + bestSyncPoint.segmentIndex) + (typeof bestSyncPoint.partIndex === 'number' ? ",partIndex:" + bestSyncPoint.partIndex : '') + ']');
26387 return bestSyncPoint;
26388 }
26389 /**
26390 * Save any meta-data present on the segments when segments leave
26391 * the live window to the playlist to allow for synchronization at the
26392 * playlist level later.
26393 *
26394 * @param {Playlist} oldPlaylist - The previous active playlist
26395 * @param {Playlist} newPlaylist - The updated and most current playlist
26396 */
26397 ;
26398
26399 _proto.saveExpiredSegmentInfo = function saveExpiredSegmentInfo(oldPlaylist, newPlaylist) {
26400 var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence; // Ignore large media sequence gaps
26401
26402 if (mediaSequenceDiff > MAX_MEDIA_SEQUENCE_DIFF_FOR_SYNC) {
26403 videojs__default["default"].log.warn("Not saving expired segment info. Media sequence gap " + mediaSequenceDiff + " is too large.");
26404 return;
26405 } // When a segment expires from the playlist and it has a start time,
26406 // save that information as a possible sync-point reference for the future
26407
26408
26409 for (var i = mediaSequenceDiff - 1; i >= 0; i--) {
26410 var lastRemovedSegment = oldPlaylist.segments[i];
26411
26412 if (lastRemovedSegment && typeof lastRemovedSegment.start !== 'undefined') {
26413 newPlaylist.syncInfo = {
26414 mediaSequence: oldPlaylist.mediaSequence + i,
26415 time: lastRemovedSegment.start
26416 };
26417 this.logger_("playlist refresh sync: [time:" + newPlaylist.syncInfo.time + "," + (" mediaSequence: " + newPlaylist.syncInfo.mediaSequence + "]"));
26418 this.trigger('syncinfoupdate');
26419 break;
26420 }
26421 }
26422 }
26423 /**
26424 * Save the mapping from the playlist's ProgramDateTime to display time. This should only happen
26425 * before segments start to load.
26426 *
26427 * @param {Playlist} playlist - The currently active playlist
26428 */
26429 ;
26430
26431 _proto.setDateTimeMappingForStart = function setDateTimeMappingForStart(playlist) {
26432 // It's possible for the playlist to be updated before playback starts, meaning time
26433 // zero is not yet set. If, during these playlist refreshes, a discontinuity is
26434 // crossed, then the old time zero mapping (for the prior timeline) would be retained
26435 // unless the mappings are cleared.
26436 this.timelineToDatetimeMappings = {};
26437
26438 if (playlist.segments && playlist.segments.length && playlist.segments[0].dateTimeObject) {
26439 var firstSegment = playlist.segments[0];
26440 var playlistTimestamp = firstSegment.dateTimeObject.getTime() / 1000;
26441 this.timelineToDatetimeMappings[firstSegment.timeline] = -playlistTimestamp;
26442 }
26443 }
26444 /**
26445 * Calculates and saves timeline mappings, playlist sync info, and segment timing values
26446 * based on the latest timing information.
26447 *
26448 * @param {Object} options
26449 * Options object
26450 * @param {SegmentInfo} options.segmentInfo
26451 * The current active request information
26452 * @param {boolean} options.shouldSaveTimelineMapping
26453 * If there's a timeline change, determines if the timeline mapping should be
26454 * saved to the timeline and program-date-time mappings.
26455 */
26456 ;
26457
26458 _proto.saveSegmentTimingInfo = function saveSegmentTimingInfo(_ref) {
26459 var segmentInfo = _ref.segmentInfo,
26460 shouldSaveTimelineMapping = _ref.shouldSaveTimelineMapping;
26461 var didCalculateSegmentTimeMapping = this.calculateSegmentTimeMapping_(segmentInfo, segmentInfo.timingInfo, shouldSaveTimelineMapping);
26462 var segment = segmentInfo.segment;
26463
26464 if (didCalculateSegmentTimeMapping) {
26465 this.saveDiscontinuitySyncInfo_(segmentInfo); // If the playlist does not have sync information yet, record that information
26466 // now with segment timing information
26467
26468 if (!segmentInfo.playlist.syncInfo) {
26469 segmentInfo.playlist.syncInfo = {
26470 mediaSequence: segmentInfo.playlist.mediaSequence + segmentInfo.mediaIndex,
26471 time: segment.start
26472 };
26473 }
26474 }
26475
26476 var dateTime = segment.dateTimeObject;
26477
26478 if (segment.discontinuity && shouldSaveTimelineMapping && dateTime) {
26479 this.timelineToDatetimeMappings[segment.timeline] = -(dateTime.getTime() / 1000);
26480 }
26481 };
26482
26483 _proto.timestampOffsetForTimeline = function timestampOffsetForTimeline(timeline) {
26484 if (typeof this.timelines[timeline] === 'undefined') {
26485 return null;
26486 }
26487
26488 return this.timelines[timeline].time;
26489 };
26490
26491 _proto.mappingForTimeline = function mappingForTimeline(timeline) {
26492 if (typeof this.timelines[timeline] === 'undefined') {
26493 return null;
26494 }
26495
26496 return this.timelines[timeline].mapping;
26497 }
26498 /**
26499 * Use the "media time" for a segment to generate a mapping to "display time" and
26500 * save that display time to the segment.
26501 *
26502 * @private
26503 * @param {SegmentInfo} segmentInfo
26504 * The current active request information
26505 * @param {Object} timingInfo
26506 * The start and end time of the current segment in "media time"
26507 * @param {boolean} shouldSaveTimelineMapping
26508 * If there's a timeline change, determines if the timeline mapping should be
26509 * saved in timelines.
26510 * @return {boolean}
26511 * Returns false if segment time mapping could not be calculated
26512 */
26513 ;
26514
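// Worked example (illustrative): if a segment starts at display time 100
// (startOfSegment) but its media time (timingInfo.start) is 20, the saved
// mapping is 100 - 20 = 80; afterwards, media time t on this timeline maps
// to display time t + 80.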
26515 _proto.calculateSegmentTimeMapping_ = function calculateSegmentTimeMapping_(segmentInfo, timingInfo, shouldSaveTimelineMapping) {
26516 // TODO: remove side effects
26517 var segment = segmentInfo.segment;
26518 var part = segmentInfo.part;
26519 var mappingObj = this.timelines[segmentInfo.timeline];
26520 var start;
26521 var end;
26522
26523 if (typeof segmentInfo.timestampOffset === 'number') {
26524 mappingObj = {
26525 time: segmentInfo.startOfSegment,
26526 mapping: segmentInfo.startOfSegment - timingInfo.start
26527 };
26528
26529 if (shouldSaveTimelineMapping) {
26530 this.timelines[segmentInfo.timeline] = mappingObj;
26531 this.trigger('timestampoffset');
26532 this.logger_("time mapping for timeline " + segmentInfo.timeline + ": " + ("[time: " + mappingObj.time + "] [mapping: " + mappingObj.mapping + "]"));
26533 }
26534
26535 start = segmentInfo.startOfSegment;
26536 end = timingInfo.end + mappingObj.mapping;
26537 } else if (mappingObj) {
26538 start = timingInfo.start + mappingObj.mapping;
26539 end = timingInfo.end + mappingObj.mapping;
26540 } else {
26541 return false;
26542 }
26543
26544 if (part) {
26545 part.start = start;
26546 part.end = end;
26547 } // If we don't have a segment start yet or the start value we got
26548 // is less than our current segment.start value, save a new start value.
26549 // We have to do this because parts will have segment timing info saved
26550 // multiple times and we want segment start to be the earliest part start
26551 // value for that segment.
26552
26553
26554 if (!segment.start || start < segment.start) {
26555 segment.start = start;
26556 }
26557
26558 segment.end = end;
26559 return true;
26560 }
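  // Worked example (illustrative, hypothetical values): a segment that starts a
  // new timeline with startOfSegment = 10 and media timingInfo
  // { start: 100, end: 106 } produces mapping = 10 - 100 = -90, so:
  //
  //   var start = 10;      // segmentInfo.startOfSegment
  //   var end = 106 + -90; // timingInfo.end + mappingObj.mapping === 16
  //
  // Later segments on the same timeline reuse the saved mapping:
  // start = timingInfo.start + mapping and end = timingInfo.end + mapping.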
26561 /**
26562 * Each time we encounter a discontinuity in the playlist, attempt to calculate
26563 * the location in display time of its start and save it. We also save an
26564 * accuracy value so that we save values with the most accuracy (closest to 0).
26565 *
26566 * @private
26567 * @param {SegmentInfo} segmentInfo - The current active request information
26568 */
26569 ;
26570
26571 _proto.saveDiscontinuitySyncInfo_ = function saveDiscontinuitySyncInfo_(segmentInfo) {
26572 var playlist = segmentInfo.playlist;
26573 var segment = segmentInfo.segment; // If the current segment is a discontinuity then we know exactly where
26574 // the range starts and its accuracy is 0 (greater accuracy values
26575 // mean more approximation)
26576
26577 if (segment.discontinuity) {
26578 this.discontinuities[segment.timeline] = {
26579 time: segment.start,
26580 accuracy: 0
26581 };
26582 } else if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
26583 // Search for future discontinuities that we can provide better timing
26584 // information for and save that information for sync purposes
26585 for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
26586 var segmentIndex = playlist.discontinuityStarts[i];
26587 var discontinuity = playlist.discontinuitySequence + i + 1;
26588 var mediaIndexDiff = segmentIndex - segmentInfo.mediaIndex;
26589 var accuracy = Math.abs(mediaIndexDiff);
26590
26591 if (!this.discontinuities[discontinuity] || this.discontinuities[discontinuity].accuracy > accuracy) {
26592 var time = void 0;
26593
26594 if (mediaIndexDiff < 0) {
26595 time = segment.start - sumDurations({
26596 defaultDuration: playlist.targetDuration,
26597 durationList: playlist.segments,
26598 startIndex: segmentInfo.mediaIndex,
26599 endIndex: segmentIndex
26600 });
26601 } else {
26602 time = segment.end + sumDurations({
26603 defaultDuration: playlist.targetDuration,
26604 durationList: playlist.segments,
26605 startIndex: segmentInfo.mediaIndex + 1,
26606 endIndex: segmentIndex
26607 });
26608 }
26609
26610 this.discontinuities[discontinuity] = {
26611 time: time,
26612 accuracy: accuracy
26613 };
26614 }
26615 }
26616 }
26617 };
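  // Illustrative sketch (hypothetical values): with the current segment at
  // mediaIndex 2 and a discontinuity starting at segmentIndex 5,
  // mediaIndexDiff = 3 and accuracy = 3, so the discontinuity's display time is
  // estimated by adding the durations of segments 3 and 4 to the current
  // segment's end:
  //
  //   var time = segment.end + sumDurations({
  //     defaultDuration: playlist.targetDuration,
  //     durationList: playlist.segments,
  //     startIndex: 3, // segmentInfo.mediaIndex + 1
  //     endIndex: 5    // segmentIndex
  //   });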
26618
26619 _proto.dispose = function dispose() {
26620 this.trigger('dispose');
26621 this.off();
26622 };
26623
26624 return SyncController;
26625 }(videojs__default["default"].EventTarget);
26626
26627 /**
26628 * The TimelineChangeController acts as a source for segment loaders to listen for and
26629 * keep track of the latest and pending timeline changes. This is useful to ensure
26630 * proper sync, as each loader may need to consider what timeline the other loader
26631 * is on before making changes that could impact the other loader's media.
26632 *
26633 * @class TimelineChangeController
26634 * @extends videojs.EventTarget
26635 */
26636
26637 var TimelineChangeController = /*#__PURE__*/function (_videojs$EventTarget) {
26638 inheritsLoose(TimelineChangeController, _videojs$EventTarget);
26639
26640 function TimelineChangeController() {
26641 var _this;
26642
26643 _this = _videojs$EventTarget.call(this) || this;
26644 _this.pendingTimelineChanges_ = {};
26645 _this.lastTimelineChanges_ = {};
26646 return _this;
26647 }
26648
26649 var _proto = TimelineChangeController.prototype;
26650
26651 _proto.clearPendingTimelineChange = function clearPendingTimelineChange(type) {
26652 this.pendingTimelineChanges_[type] = null;
26653 this.trigger('pendingtimelinechange');
26654 };
26655
26656 _proto.pendingTimelineChange = function pendingTimelineChange(_ref) {
26657 var type = _ref.type,
26658 from = _ref.from,
26659 to = _ref.to;
26660
26661 if (typeof from === 'number' && typeof to === 'number') {
26662 this.pendingTimelineChanges_[type] = {
26663 type: type,
26664 from: from,
26665 to: to
26666 };
26667 this.trigger('pendingtimelinechange');
26668 }
26669
26670 return this.pendingTimelineChanges_[type];
26671 };
26672
26673 _proto.lastTimelineChange = function lastTimelineChange(_ref2) {
26674 var type = _ref2.type,
26675 from = _ref2.from,
26676 to = _ref2.to;
26677
26678 if (typeof from === 'number' && typeof to === 'number') {
26679 this.lastTimelineChanges_[type] = {
26680 type: type,
26681 from: from,
26682 to: to
26683 };
26684 delete this.pendingTimelineChanges_[type];
26685 this.trigger('timelinechange');
26686 }
26687
26688 return this.lastTimelineChanges_[type];
26689 };
26690
26691 _proto.dispose = function dispose() {
26692 this.trigger('dispose');
26693 this.pendingTimelineChanges_ = {};
26694 this.lastTimelineChanges_ = {};
26695 this.off();
26696 };
26697
26698 return TimelineChangeController;
26699 }(videojs__default["default"].EventTarget);
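  // Usage sketch (illustrative only; the 'main' type below is hypothetical):
  //
  //   var timelineChangeController = new TimelineChangeController();
  //
  //   timelineChangeController.on('timelinechange', function () {
  //     // react once a timeline change has been committed
  //   });
  //   timelineChangeController.pendingTimelineChange({ type: 'main', from: 0, to: 1 });
  //   timelineChangeController.lastTimelineChange({ type: 'main', from: 0, to: 1 });
  //   // the pending entry for 'main' is now deleted and 'timelinechange' has fired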
26700
26701 /* rollup-plugin-worker-factory start for worker!/Users/ddashkevich/projects/http-streaming/src/decrypter-worker.js */
26702 var workerCode = transform(getWorkerString(function () {
26703
26704 var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
26705
26706 function createCommonjsModule(fn, basedir, module) {
26707 return module = {
26708 path: basedir,
26709 exports: {},
26710 require: function require(path, base) {
26711 return commonjsRequire(path, base === undefined || base === null ? module.path : base);
26712 }
26713 }, fn(module, module.exports), module.exports;
26714 }
26715
26716 function commonjsRequire() {
26717 throw new Error('Dynamic requires are not currently supported by @rollup/plugin-commonjs');
26718 }
26719
26720 var createClass = createCommonjsModule(function (module) {
26721 function _defineProperties(target, props) {
26722 for (var i = 0; i < props.length; i++) {
26723 var descriptor = props[i];
26724 descriptor.enumerable = descriptor.enumerable || false;
26725 descriptor.configurable = true;
26726 if ("value" in descriptor) descriptor.writable = true;
26727 Object.defineProperty(target, descriptor.key, descriptor);
26728 }
26729 }
26730
26731 function _createClass(Constructor, protoProps, staticProps) {
26732 if (protoProps) _defineProperties(Constructor.prototype, protoProps);
26733 if (staticProps) _defineProperties(Constructor, staticProps);
26734 return Constructor;
26735 }
26736
26737 module.exports = _createClass;
26738 module.exports["default"] = module.exports, module.exports.__esModule = true;
26739 });
26740 var setPrototypeOf = createCommonjsModule(function (module) {
26741 function _setPrototypeOf(o, p) {
26742 module.exports = _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) {
26743 o.__proto__ = p;
26744 return o;
26745 };
26746
26747 module.exports["default"] = module.exports, module.exports.__esModule = true;
26748 return _setPrototypeOf(o, p);
26749 }
26750
26751 module.exports = _setPrototypeOf;
26752 module.exports["default"] = module.exports, module.exports.__esModule = true;
26753 });
26754 var inheritsLoose = createCommonjsModule(function (module) {
26755 function _inheritsLoose(subClass, superClass) {
26756 subClass.prototype = Object.create(superClass.prototype);
26757 subClass.prototype.constructor = subClass;
26758 setPrototypeOf(subClass, superClass);
26759 }
26760
26761 module.exports = _inheritsLoose;
26762 module.exports["default"] = module.exports, module.exports.__esModule = true;
26763 });
26764 /**
26765 * @file stream.js
26766 */
26767
26768 /**
26769 * A lightweight readable stream implementation that handles event dispatching.
26770 *
26771 * @class Stream
26772 */
26773
26774 var Stream = /*#__PURE__*/function () {
26775 function Stream() {
26776 this.listeners = {};
26777 }
26778 /**
26779 * Add a listener for a specified event type.
26780 *
26781 * @param {string} type the event name
26782 * @param {Function} listener the callback to be invoked when an event of
26783 * the specified type occurs
26784 */
26785
26786
26787 var _proto = Stream.prototype;
26788
26789 _proto.on = function on(type, listener) {
26790 if (!this.listeners[type]) {
26791 this.listeners[type] = [];
26792 }
26793
26794 this.listeners[type].push(listener);
26795 }
26796 /**
26797 * Remove a listener for a specified event type.
26798 *
26799 * @param {string} type the event name
26800 * @param {Function} listener a function previously registered for this
26801 * type of event through `on`
26802 * @return {boolean} whether the listener was found and removed
26803 */
26804 ;
26805
26806 _proto.off = function off(type, listener) {
26807 if (!this.listeners[type]) {
26808 return false;
26809 }
26810
26811 var index = this.listeners[type].indexOf(listener); // TODO: which is better?
26812 // In Video.js we slice listener functions
26813 // on trigger so that it does not mess up the order
26814 // while we loop through.
26815 //
26816 // Here we slice on off so that the loop in trigger
26817 // can continue using its old reference to loop without
26818 // messing up the order.
26819
26820 this.listeners[type] = this.listeners[type].slice(0);
26821 this.listeners[type].splice(index, 1);
26822 return index > -1;
26823 }
26824 /**
26825 * Trigger an event of the specified type on this stream. Any additional
26826 * arguments to this function are passed as parameters to event listeners.
26827 *
26828 * @param {string} type the event name
26829 */
26830 ;
26831
26832 _proto.trigger = function trigger(type) {
26833 var callbacks = this.listeners[type];
26834
26835 if (!callbacks) {
26836 return;
26837 } // Slicing the arguments on every invocation of this method
26838 // can add a significant amount of overhead. Avoid the
26839 // intermediate object creation for the common case of a
26840 // single callback argument
26841
26842
26843 if (arguments.length === 2) {
26844 var length = callbacks.length;
26845
26846 for (var i = 0; i < length; ++i) {
26847 callbacks[i].call(this, arguments[1]);
26848 }
26849 } else {
26850 var args = Array.prototype.slice.call(arguments, 1);
26851 var _length = callbacks.length;
26852
26853 for (var _i = 0; _i < _length; ++_i) {
26854 callbacks[_i].apply(this, args);
26855 }
26856 }
26857 }
26858 /**
26859 * Destroys the stream and cleans up.
26860 */
26861 ;
26862
26863 _proto.dispose = function dispose() {
26864 this.listeners = {};
26865 }
26866 /**
26867 * Forwards all `data` events on this stream to the destination stream. The
26868 * destination stream should provide a method `push` to receive the data
26869 * events as they arrive.
26870 *
26871 * @param {Stream} destination the stream that will receive all `data` events
26872 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
26873 */
26874 ;
26875
26876 _proto.pipe = function pipe(destination) {
26877 this.on('data', function (data) {
26878 destination.push(data);
26879 });
26880 };
26881
26882 return Stream;
26883 }();
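  // Usage sketch (illustrative only):
  //
  //   var source = new Stream();
  //   var sink = { push: function (data) { /* consume data */ } };
  //
  //   source.pipe(sink);               // forward 'data' events to sink.push
  //   source.trigger('data', 'chunk'); // invokes sink.push('chunk')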
26884 /*! @name pkcs7 @version 1.0.4 @license Apache-2.0 */
26885
26886 /**
26887 * Returns the subarray of a Uint8Array without PKCS#7 padding.
26888 *
26889 * @param {Uint8Array} padded unencrypted bytes that have been padded
26890 * @return {Uint8Array} the unpadded bytes
26891 * @see http://tools.ietf.org/html/rfc5652
26892 */
26893
26894
26895 function unpad(padded) {
26896 return padded.subarray(0, padded.byteLength - padded[padded.byteLength - 1]);
26897 }
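  // Worked example (illustrative): a 16-byte block whose final byte is the
  // PKCS#7 pad value 4 is trimmed to its first 12 bytes:
  //
  //   var padded = new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 4, 4, 4, 4]);
  //   unpad(padded).byteLength; // 12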
26898 /*! @name aes-decrypter @version 3.1.3 @license Apache-2.0 */
26899
26900 /**
26901 * @file aes.js
26902 *
26903 * This file contains an adaptation of the AES decryption algorithm
26904 * from the Stanford Javascript Cryptography Library. That work is
26905 * covered by the following copyright and permissions notice:
26906 *
26907 * Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
26908 * All rights reserved.
26909 *
26910 * Redistribution and use in source and binary forms, with or without
26911 * modification, are permitted provided that the following conditions are
26912 * met:
26913 *
26914 * 1. Redistributions of source code must retain the above copyright
26915 * notice, this list of conditions and the following disclaimer.
26916 *
26917 * 2. Redistributions in binary form must reproduce the above
26918 * copyright notice, this list of conditions and the following
26919 * disclaimer in the documentation and/or other materials provided
26920 * with the distribution.
26921 *
26922 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
26923 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
26924 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
26925 * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE
26926 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
26927 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
26928 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
26929 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
26930 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
26931 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26932 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26933 *
26934 * The views and conclusions contained in the software and documentation
26935 * are those of the authors and should not be interpreted as representing
26936 * official policies, either expressed or implied, of the authors.
26937 */
26938
26939 /**
26940 * Expand the S-box tables.
26941 *
26942 * @private
26943 */
26944
26945
26946 var precompute = function precompute() {
26947 var tables = [[[], [], [], [], []], [[], [], [], [], []]];
26948 var encTable = tables[0];
26949 var decTable = tables[1];
26950 var sbox = encTable[4];
26951 var sboxInv = decTable[4];
26952 var i;
26953 var x;
26954 var xInv;
26955 var d = [];
26956 var th = [];
26957 var x2;
26958 var x4;
26959 var x8;
26960 var s;
26961 var tEnc;
26962 var tDec; // Compute double and third tables
26963
26964 for (i = 0; i < 256; i++) {
26965 th[(d[i] = i << 1 ^ (i >> 7) * 283) ^ i] = i;
26966 }
26967
26968 for (x = xInv = 0; !sbox[x]; x ^= x2 || 1, xInv = th[xInv] || 1) {
26969 // Compute sbox
26970 s = xInv ^ xInv << 1 ^ xInv << 2 ^ xInv << 3 ^ xInv << 4;
26971 s = s >> 8 ^ s & 255 ^ 99;
26972 sbox[x] = s;
26973 sboxInv[s] = x; // Compute MixColumns
26974
26975 x8 = d[x4 = d[x2 = d[x]]];
26976 tDec = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
26977 tEnc = d[s] * 0x101 ^ s * 0x1010100;
26978
26979 for (i = 0; i < 4; i++) {
26980 encTable[i][x] = tEnc = tEnc << 24 ^ tEnc >>> 8;
26981 decTable[i][s] = tDec = tDec << 24 ^ tDec >>> 8;
26982 }
26983 } // Compactify. Considerable speedup on Firefox.
26984
26985
26986 for (i = 0; i < 5; i++) {
26987 encTable[i] = encTable[i].slice(0);
26988 decTable[i] = decTable[i].slice(0);
26989 }
26990
26991 return tables;
26992 };
26993
26994 var aesTables = null;
26995 /**
26996 * Schedule out an AES key for both encryption and decryption. This
26997 * is a low-level class. Use a cipher mode to do bulk encryption.
26998 *
26999 * @class AES
27000 * @param {Array} key The key as an array of 4, 6 or 8 words.
27001 */
27002
27003 var AES = /*#__PURE__*/function () {
27004 function AES(key) {
27005 /**
27006 * The expanded S-box and inverse S-box tables. These will be computed
27007 * on the client so that we don't have to send them down the wire.
27008 *
27009 * There are two tables, _tables[0] is for encryption and
27010 * _tables[1] is for decryption.
27011 *
27012 * The first 4 sub-tables are the expanded S-box with MixColumns. The
27013 * last (_tables[0][4] and _tables[1][4]) is the S-box itself.
27014 *
27015 * @private
27016 */
27017 // if we have yet to precompute the S-box tables
27018 // do so now
27019 if (!aesTables) {
27020 aesTables = precompute();
27021 } // then make a copy of that object for use
27022
27023
27024 this._tables = [[aesTables[0][0].slice(), aesTables[0][1].slice(), aesTables[0][2].slice(), aesTables[0][3].slice(), aesTables[0][4].slice()], [aesTables[1][0].slice(), aesTables[1][1].slice(), aesTables[1][2].slice(), aesTables[1][3].slice(), aesTables[1][4].slice()]];
27025 var i;
27026 var j;
27027 var tmp;
27028 var sbox = this._tables[0][4];
27029 var decTable = this._tables[1];
27030 var keyLen = key.length;
27031 var rcon = 1;
27032
27033 if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
27034 throw new Error('Invalid aes key size');
27035 }
27036
27037 var encKey = key.slice(0);
27038 var decKey = [];
27039 this._key = [encKey, decKey]; // schedule encryption keys
27040
27041 for (i = keyLen; i < 4 * keyLen + 28; i++) {
27042 tmp = encKey[i - 1]; // apply sbox
27043
27044 if (i % keyLen === 0 || keyLen === 8 && i % keyLen === 4) {
27045 tmp = sbox[tmp >>> 24] << 24 ^ sbox[tmp >> 16 & 255] << 16 ^ sbox[tmp >> 8 & 255] << 8 ^ sbox[tmp & 255]; // shift rows and add rcon
27046
27047 if (i % keyLen === 0) {
27048 tmp = tmp << 8 ^ tmp >>> 24 ^ rcon << 24;
27049 rcon = rcon << 1 ^ (rcon >> 7) * 283;
27050 }
27051 }
27052
27053 encKey[i] = encKey[i - keyLen] ^ tmp;
27054 } // schedule decryption keys
27055
27056
27057 for (j = 0; i; j++, i--) {
27058 tmp = encKey[j & 3 ? i : i - 4];
27059
27060 if (i <= 4 || j < 4) {
27061 decKey[j] = tmp;
27062 } else {
27063 decKey[j] = decTable[0][sbox[tmp >>> 24]] ^ decTable[1][sbox[tmp >> 16 & 255]] ^ decTable[2][sbox[tmp >> 8 & 255]] ^ decTable[3][sbox[tmp & 255]];
27064 }
27065 }
27066 }
27067 /**
27068 * Decrypt 16 bytes, specified as four 32-bit words.
27069 *
27070 * @param {number} encrypted0 the first word to decrypt
27071 * @param {number} encrypted1 the second word to decrypt
27072 * @param {number} encrypted2 the third word to decrypt
27073 * @param {number} encrypted3 the fourth word to decrypt
27074 * @param {Int32Array} out the array to write the decrypted words
27075 * into
27076 * @param {number} offset the offset into the output array to start
27077 * writing results
27078 * @return {Array} The plaintext.
27079 */
27080
27081
27082 var _proto = AES.prototype;
27083
27084 _proto.decrypt = function decrypt(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {
27085 var key = this._key[1]; // state variables a,b,c,d are loaded with pre-whitened data
27086
27087 var a = encrypted0 ^ key[0];
27088 var b = encrypted3 ^ key[1];
27089 var c = encrypted2 ^ key[2];
27090 var d = encrypted1 ^ key[3];
27091 var a2;
27092 var b2;
27093 var c2; // key.length === 2 ?
27094
27095 var nInnerRounds = key.length / 4 - 2;
27096 var i;
27097 var kIndex = 4;
27098 var table = this._tables[1]; // load up the tables
27099
27100 var table0 = table[0];
27101 var table1 = table[1];
27102 var table2 = table[2];
27103 var table3 = table[3];
27104 var sbox = table[4]; // Inner rounds. Cribbed from OpenSSL.
27105
27106 for (i = 0; i < nInnerRounds; i++) {
27107 a2 = table0[a >>> 24] ^ table1[b >> 16 & 255] ^ table2[c >> 8 & 255] ^ table3[d & 255] ^ key[kIndex];
27108 b2 = table0[b >>> 24] ^ table1[c >> 16 & 255] ^ table2[d >> 8 & 255] ^ table3[a & 255] ^ key[kIndex + 1];
27109 c2 = table0[c >>> 24] ^ table1[d >> 16 & 255] ^ table2[a >> 8 & 255] ^ table3[b & 255] ^ key[kIndex + 2];
27110 d = table0[d >>> 24] ^ table1[a >> 16 & 255] ^ table2[b >> 8 & 255] ^ table3[c & 255] ^ key[kIndex + 3];
27111 kIndex += 4;
27112 a = a2;
27113 b = b2;
27114 c = c2;
27115 } // Last round.
27116
27117
27118 for (i = 0; i < 4; i++) {
27119 out[(3 & -i) + offset] = sbox[a >>> 24] << 24 ^ sbox[b >> 16 & 255] << 16 ^ sbox[c >> 8 & 255] << 8 ^ sbox[d & 255] ^ key[kIndex++];
27120 a2 = a;
27121 a = b;
27122 b = c;
27123 c = d;
27124 d = a2;
27125 }
27126 };
27127
27128 return AES;
27129 }();
27130 /**
27131 * A wrapper around the Stream class to use setTimeout
27132 * and run stream "jobs" asynchronously.
27133 *
27134 * @class AsyncStream
27135 * @extends Stream
27136 */
27137
27138
27139 var AsyncStream = /*#__PURE__*/function (_Stream) {
27140 inheritsLoose(AsyncStream, _Stream);
27141
27142 function AsyncStream() {
27143 var _this;
27144
27145 _this = _Stream.call(this, Stream) || this;
27146 _this.jobs = [];
27147 _this.delay = 1;
27148 _this.timeout_ = null;
27149 return _this;
27150 }
27151 /**
27152 * process an async job
27153 *
27154 * @private
27155 */
27156
27157
27158 var _proto = AsyncStream.prototype;
27159
27160 _proto.processJob_ = function processJob_() {
27161 this.jobs.shift()();
27162
27163 if (this.jobs.length) {
27164 this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
27165 } else {
27166 this.timeout_ = null;
27167 }
27168 }
27169 /**
27170 * push a job into the stream
27171 *
27172 * @param {Function} job the job to push into the stream
27173 */
27174 ;
27175
27176 _proto.push = function push(job) {
27177 this.jobs.push(job);
27178
27179 if (!this.timeout_) {
27180 this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
27181 }
27182 };
27183
27184 return AsyncStream;
27185 }(Stream);
27186 /**
27187 * Convert network-order (big-endian) bytes into their little-endian
27188 * representation.
27189 */
27190
27191
27192 var ntoh = function ntoh(word) {
27193 return word << 24 | (word & 0xff00) << 8 | (word & 0xff0000) >> 8 | word >>> 24;
27194 };
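  // Worked example: ntoh reverses the byte order of a 32-bit word, so
  //
  //   ntoh(0x01020304); // 0x04030201 (67305985)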
27195 /**
27196 * Decrypt bytes using AES-128 with CBC and PKCS#7 padding.
27197 *
27198 * @param {Uint8Array} encrypted the encrypted bytes
27199 * @param {Uint32Array} key the bytes of the decryption key
27200 * @param {Uint32Array} initVector the initialization vector (IV) to
27201 * use for the first round of CBC.
27202 * @return {Uint8Array} the decrypted bytes
27203 *
27204 * @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
27205 * @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29
27206 * @see https://tools.ietf.org/html/rfc2315
27207 */
27208
27209
27210 var decrypt = function decrypt(encrypted, key, initVector) {
27211 // word-level access to the encrypted bytes
27212 var encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2);
27213 var decipher = new AES(Array.prototype.slice.call(key)); // byte and word-level access for the decrypted output
27214
27215 var decrypted = new Uint8Array(encrypted.byteLength);
27216 var decrypted32 = new Int32Array(decrypted.buffer); // temporary variables for working with the IV, encrypted, and
27217 // decrypted data
27218
27219 var init0;
27220 var init1;
27221 var init2;
27222 var init3;
27223 var encrypted0;
27224 var encrypted1;
27225 var encrypted2;
27226 var encrypted3; // iteration variable
27227
27228 var wordIx; // pull out the words of the IV to ensure we don't modify the
27229 // passed-in reference and for easier access
27230
27231 init0 = initVector[0];
27232 init1 = initVector[1];
27233 init2 = initVector[2];
27234 init3 = initVector[3]; // decrypt four word sequences, applying cipher-block chaining (CBC)
27235 // to each decrypted block
27236
27237 for (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {
27238 // convert big-endian (network order) words into little-endian
27239 // (javascript order)
27240 encrypted0 = ntoh(encrypted32[wordIx]);
27241 encrypted1 = ntoh(encrypted32[wordIx + 1]);
27242 encrypted2 = ntoh(encrypted32[wordIx + 2]);
27243 encrypted3 = ntoh(encrypted32[wordIx + 3]); // decrypt the block
27244
27245 decipher.decrypt(encrypted0, encrypted1, encrypted2, encrypted3, decrypted32, wordIx); // XOR with the IV, and restore network byte-order to obtain the
27246 // plaintext
27247
27248 decrypted32[wordIx] = ntoh(decrypted32[wordIx] ^ init0);
27249 decrypted32[wordIx + 1] = ntoh(decrypted32[wordIx + 1] ^ init1);
27250 decrypted32[wordIx + 2] = ntoh(decrypted32[wordIx + 2] ^ init2);
27251 decrypted32[wordIx + 3] = ntoh(decrypted32[wordIx + 3] ^ init3); // setup the IV for the next round
27252
27253 init0 = encrypted0;
27254 init1 = encrypted1;
27255 init2 = encrypted2;
27256 init3 = encrypted3;
27257 }
27258
27259 return decrypted;
27260 };
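  // CBC recap (illustrative): each plaintext block is the AES decryption of a
  // ciphertext block XORed with the previous ciphertext block, with the IV
  // standing in for block -1:
  //
  //   P[0] = D(C[0]) ^ IV
  //   P[i] = D(C[i]) ^ C[i - 1] // for i > 0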
27261 /**
27262 * The `Decrypter` class that manages decryption of AES
27263 * data through `AsyncStream` objects and the `decrypt`
27264 * function
27265 *
27266 * @param {Uint8Array} encrypted the encrypted bytes
27267 * @param {Uint32Array} key the bytes of the decryption key
27268 * @param {Uint32Array} initVector the initialization vector (IV) to use for the first round of CBC
27269 * @param {Function} done the function to run when done
27270 * @class Decrypter
27271 */
27272
27273
27274 var Decrypter = /*#__PURE__*/function () {
27275 function Decrypter(encrypted, key, initVector, done) {
27276 var step = Decrypter.STEP;
27277 var encrypted32 = new Int32Array(encrypted.buffer);
27278 var decrypted = new Uint8Array(encrypted.byteLength);
27279 var i = 0;
27280 this.asyncStream_ = new AsyncStream(); // split up the decryption job and do the individual chunks asynchronously
27281
27282 this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
27283
27284 for (i = step; i < encrypted32.length; i += step) {
27285 initVector = new Uint32Array([ntoh(encrypted32[i - 4]), ntoh(encrypted32[i - 3]), ntoh(encrypted32[i - 2]), ntoh(encrypted32[i - 1])]);
27286 this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
27287 } // invoke the done() callback when everything is finished
27288
27289
27290 this.asyncStream_.push(function () {
27291 // remove pkcs#7 padding from the decrypted bytes
27292 done(null, unpad(decrypted));
27293 });
27294 }
27295 /**
27296 * A getter for step, the maximum number of bytes to process at one time.
27297 *
27298 * @return {number} the value of step, 32000
27299 */
27300
27301
27302 var _proto = Decrypter.prototype;
27303 /**
27304 * @private
27305 */
27306
27307 _proto.decryptChunk_ = function decryptChunk_(encrypted, key, initVector, decrypted) {
27308 return function () {
27309 var bytes = decrypt(encrypted, key, initVector);
27310 decrypted.set(bytes, encrypted.byteOffset);
27311 };
27312 };
27313
27314 createClass(Decrypter, null, [{
27315 key: "STEP",
27316 get: function get() {
27317 // 4 * 8000;
27318 return 32000;
27319 }
27320 }]);
27321 return Decrypter;
27322 }();
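  // Usage sketch (illustrative; encryptedBytes, key, and iv are hypothetical):
  //
  //   var key = new Uint32Array([0x01020304, 0x05060708, 0x090a0b0c, 0x0d0e0f10]);
  //   var iv = new Uint32Array([0, 0, 0, 0]);
  //
  //   new Decrypter(encryptedBytes, key, iv, function (err, decryptedBytes) {
  //     // decryptedBytes is a Uint8Array with the PKCS#7 padding removed
  //   });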
27323
27324 var win;
27325
27326 if (typeof window !== "undefined") {
27327 win = window;
27328 } else if (typeof commonjsGlobal !== "undefined") {
27329 win = commonjsGlobal;
27330 } else if (typeof self !== "undefined") {
27331 win = self;
27332 } else {
27333 win = {};
27334 }
27335
27336 var window_1 = win;
27337
27338 var isArrayBufferView = function isArrayBufferView(obj) {
27339 if (typeof ArrayBuffer.isView === 'function') {
27340 return ArrayBuffer.isView(obj);
27341 }
27342
27343 return obj && obj.buffer instanceof ArrayBuffer;
27344 };
27345
27346 var BigInt = window_1.BigInt || Number;
27347 [BigInt('0x1'), BigInt('0x100'), BigInt('0x10000'), BigInt('0x1000000'), BigInt('0x100000000'), BigInt('0x10000000000'), BigInt('0x1000000000000'), BigInt('0x100000000000000'), BigInt('0x10000000000000000')];
27348
27349 (function () {
27350 var a = new Uint16Array([0xFFCC]);
27351 var b = new Uint8Array(a.buffer, a.byteOffset, a.byteLength);
27352
27353 if (b[0] === 0xFF) {
27354 return 'big';
27355 }
27356
27357 if (b[0] === 0xCC) {
27358 return 'little';
27359 }
27360
27361 return 'unknown';
27362 })();
27363 /**
27364 * Creates an object for sending to a web worker, modifying properties that are TypedArrays
27365 * into a new object with separated properties for the buffer, byteOffset, and byteLength.
27366 *
27367 * @param {Object} message
27368 * Object of properties and values to send to the web worker
27369 * @return {Object}
27370 * Modified message with TypedArray values expanded
27371 * @function createTransferableMessage
27372 */
27373
27374
27375 var createTransferableMessage = function createTransferableMessage(message) {
27376 var transferable = {};
27377 Object.keys(message).forEach(function (key) {
27378 var value = message[key];
27379
27380 if (isArrayBufferView(value)) {
27381 transferable[key] = {
27382 bytes: value.buffer,
27383 byteOffset: value.byteOffset,
27384 byteLength: value.byteLength
27385 };
27386 } else {
27387 transferable[key] = value;
27388 }
27389 });
27390 return transferable;
27391 };
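  // Worked example (illustrative): a TypedArray property is expanded so that its
  // underlying ArrayBuffer can be listed as a transferable:
  //
  //   createTransferableMessage({ encrypted: new Uint8Array([1, 2, 3]) });
  //   // => { encrypted: { bytes: ArrayBuffer, byteOffset: 0, byteLength: 3 } }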
27392 /* global self */
27393
27394 /**
27395 * Our web worker interface so that things can talk to aes-decrypter
27396 * that will be running in a web worker. The scope is passed to this by
27397 * webworkify.
27398 */
27399
27400
27401 self.onmessage = function (event) {
27402 var data = event.data;
27403 var encrypted = new Uint8Array(data.encrypted.bytes, data.encrypted.byteOffset, data.encrypted.byteLength);
27404 var key = new Uint32Array(data.key.bytes, data.key.byteOffset, data.key.byteLength / 4);
27405 var iv = new Uint32Array(data.iv.bytes, data.iv.byteOffset, data.iv.byteLength / 4);
27406 /* eslint-disable no-new, handle-callback-err */
27407
27408 new Decrypter(encrypted, key, iv, function (err, bytes) {
27409 self.postMessage(createTransferableMessage({
27410 source: data.source,
27411 decrypted: bytes
27412 }), [bytes.buffer]);
27413 });
27414 /* eslint-enable */
27415 };
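  // Inbound message shape (as consumed by the handler above; illustrative):
  //
  //   {
  //     source: <id echoed back to the caller>,
  //     encrypted: { bytes: ArrayBuffer, byteOffset: number, byteLength: number },
  //     key: { bytes: ArrayBuffer, byteOffset: number, byteLength: number },
  //     iv: { bytes: ArrayBuffer, byteOffset: number, byteLength: number }
  //   }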
27416 }));
27417 var Decrypter = factory(workerCode);
27418 /* rollup-plugin-worker-factory end for worker!/Users/ddashkevich/projects/http-streaming/src/decrypter-worker.js */
27419
27420 /**
27421 * Convert the properties of an HLS track into an audioTrackKind.
27422 *
27423 * @private
27424 */
27425
27426 var audioTrackKind_ = function audioTrackKind_(properties) {
27427 var kind = properties.default ? 'main' : 'alternative';
27428
27429 if (properties.characteristics && properties.characteristics.indexOf('public.accessibility.describes-video') >= 0) {
27430 kind = 'main-desc';
27431 }
27432
27433 return kind;
27434 };
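  // Worked examples (illustrative):
  //
  //   audioTrackKind_({ default: true }); // 'main'
  //   audioTrackKind_({ default: false }); // 'alternative'
  //   audioTrackKind_({
  //     default: true,
  //     characteristics: 'public.accessibility.describes-video'
  //   }); // 'main-desc'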
27435 /**
27436 * Pause provided segment loader and playlist loader if active
27437 *
27438 * @param {SegmentLoader} segmentLoader
27439 * SegmentLoader to pause
27440 * @param {Object} mediaType
27441 * Active media type
27442 * @function stopLoaders
27443 */
27444
27445
27446 var stopLoaders = function stopLoaders(segmentLoader, mediaType) {
27447 segmentLoader.abort();
27448 segmentLoader.pause();
27449
27450 if (mediaType && mediaType.activePlaylistLoader) {
27451 mediaType.activePlaylistLoader.pause();
27452 mediaType.activePlaylistLoader = null;
27453 }
27454 };
27455 /**
27456 * Start loading provided segment loader and playlist loader
27457 *
27458 * @param {PlaylistLoader} playlistLoader
27459 * PlaylistLoader to start loading
27460 * @param {Object} mediaType
27461 * Active media type
27462 * @function startLoaders
27463 */
27464
27465 var startLoaders = function startLoaders(playlistLoader, mediaType) {
27466 // Segment loader will be started after `loadedmetadata` or `loadedplaylist` from the
27467 // playlist loader
27468 mediaType.activePlaylistLoader = playlistLoader;
27469 playlistLoader.load();
27470 };
27471 /**
27472 * Returns a function to be called when the media group changes. It performs a
27473 * non-destructive (preserve the buffer) resync of the SegmentLoader. This is because a
27474 * change of group is merely a rendition switch of the same content at another encoding,
27475 * rather than a change of content, such as switching audio from English to Spanish.
27476 *
27477 * @param {string} type
27478 * MediaGroup type
27479 * @param {Object} settings
27480 * Object containing required information for media groups
27481 * @return {Function}
27482 * Handler for a non-destructive resync of SegmentLoader when the active media
27483 * group changes.
27484 * @function onGroupChanged
27485 */
27486
27487 var onGroupChanged = function onGroupChanged(type, settings) {
27488 return function () {
27489 var _settings$segmentLoad = settings.segmentLoaders,
27490 segmentLoader = _settings$segmentLoad[type],
27491 mainSegmentLoader = _settings$segmentLoad.main,
27492 mediaType = settings.mediaTypes[type];
27493 var activeTrack = mediaType.activeTrack();
27494 var activeGroup = mediaType.getActiveGroup();
27495 var previousActiveLoader = mediaType.activePlaylistLoader;
27496 var lastGroup = mediaType.lastGroup_; // the group did not change, do nothing
27497
27498 if (activeGroup && lastGroup && activeGroup.id === lastGroup.id) {
27499 return;
27500 }
27501
27502 mediaType.lastGroup_ = activeGroup;
27503 mediaType.lastTrack_ = activeTrack;
27504 stopLoaders(segmentLoader, mediaType);
27505
27506 if (!activeGroup || activeGroup.isMasterPlaylist) {
27507 // there is no active group, or the active group is a main playlist and won't change
27508 return;
27509 }
27510
27511 if (!activeGroup.playlistLoader) {
27512 if (previousActiveLoader) {
27513 // The previous group had a playlist loader but the new active group does not
27514 // this means we are switching from demuxed to muxed audio. In this case we want to
27515 // do a destructive reset of the main segment loader and not restart the audio
27516 // loaders.
27517 mainSegmentLoader.resetEverything();
27518 }
27519
27520 return;
27521 } // Non-destructive resync
27522
27523
27524 segmentLoader.resyncLoader();
27525 startLoaders(activeGroup.playlistLoader, mediaType);
27526 };
27527 };
27528 var onGroupChanging = function onGroupChanging(type, settings) {
27529 return function () {
27530 var segmentLoader = settings.segmentLoaders[type],
27531 mediaType = settings.mediaTypes[type];
27532 mediaType.lastGroup_ = null;
27533 segmentLoader.abort();
27534 segmentLoader.pause();
27535 };
27536 };
27537 /**
27538 * Returns a function to be called when the media track changes. It performs a
27539 * destructive reset of the SegmentLoader to ensure we start loading as close to
27540 * currentTime as possible.
27541 *
27542 * @param {string} type
27543 * MediaGroup type
27544 * @param {Object} settings
27545 * Object containing required information for media groups
27546 * @return {Function}
27547 * Handler for a destructive reset of SegmentLoader when the active media
27548 * track changes.
27549 * @function onTrackChanged
27550 */
27551
27552 var onTrackChanged = function onTrackChanged(type, settings) {
27553 return function () {
27554 var masterPlaylistLoader = settings.masterPlaylistLoader,
27555 _settings$segmentLoad2 = settings.segmentLoaders,
27556 segmentLoader = _settings$segmentLoad2[type],
27557 mainSegmentLoader = _settings$segmentLoad2.main,
27558 mediaType = settings.mediaTypes[type];
27559 var activeTrack = mediaType.activeTrack();
27560 var activeGroup = mediaType.getActiveGroup();
27561 var previousActiveLoader = mediaType.activePlaylistLoader;
27562 var lastTrack = mediaType.lastTrack_; // track did not change, do nothing
27563
27564 if (lastTrack && activeTrack && lastTrack.id === activeTrack.id) {
27565 return;
27566 }
27567
27568 mediaType.lastGroup_ = activeGroup;
27569 mediaType.lastTrack_ = activeTrack;
27570 stopLoaders(segmentLoader, mediaType);
27571
27572 if (!activeGroup) {
27573 // there is no group active so we do not want to restart loaders
27574 return;
27575 }
27576
27577 if (activeGroup.isMasterPlaylist) {
27578 // track did not change, do nothing
27579 if (!activeTrack || !lastTrack || activeTrack.id === lastTrack.id) {
27580 return;
27581 }
27582
27583 var mpc = settings.vhs.masterPlaylistController_;
27584 var newPlaylist = mpc.selectPlaylist(); // the media will not change, do nothing
27585
27586 if (mpc.media() === newPlaylist) {
27587 return;
27588 }
27589
27590 mediaType.logger_("track change. Switching master audio from " + lastTrack.id + " to " + activeTrack.id);
27591 masterPlaylistLoader.pause();
27592 mainSegmentLoader.resetEverything();
27593 mpc.fastQualityChange_(newPlaylist);
27594 return;
27595 }
27596
27597 if (type === 'AUDIO') {
27598 if (!activeGroup.playlistLoader) {
27599 // when switching from demuxed audio/video to muxed audio/video (noted by no
27600 // playlist loader for the audio group), we want to do a destructive reset of the
27601 // main segment loader and not restart the audio loaders
27602 mainSegmentLoader.setAudio(true); // don't have to worry about disabling the audio of the audio segment loader since
27603 // it should be stopped
27604
27605 mainSegmentLoader.resetEverything();
27606 return;
27607 } // although the segment loader is an audio segment loader, call the setAudio
27608 // function to ensure it is prepared to re-append the init segment (or handle other
27609 // config changes)
27610
27611
27612 segmentLoader.setAudio(true);
27613 mainSegmentLoader.setAudio(false);
27614 }
27615
27616 if (previousActiveLoader === activeGroup.playlistLoader) {
27617 // Nothing has actually changed. This can happen because track change events can fire
27618 // multiple times for a "single" change. One for enabling the new active track, and
27619 // one for disabling the track that was active
27620 startLoaders(activeGroup.playlistLoader, mediaType);
27621 return;
27622 }
27623
27624 if (segmentLoader.track) {
27625 // For WebVTT, set the new text track in the segmentloader
27626 segmentLoader.track(activeTrack);
27627 } // destructive reset
27628
27629
27630 segmentLoader.resetEverything();
27631 startLoaders(activeGroup.playlistLoader, mediaType);
27632 };
27633 };
27634 var onError = {
27635 /**
27636 * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
27637 * an error.
27638 *
27639 * @param {string} type
27640 * MediaGroup type
27641 * @param {Object} settings
27642 * Object containing required information for media groups
27643 * @return {Function}
27644 * Error handler. Logs warning (or error if the playlist is blacklisted) to
27645 * console and switches back to default audio track.
27646 * @function onError.AUDIO
27647 */
27648 AUDIO: function AUDIO(type, settings) {
27649 return function () {
27650 var segmentLoader = settings.segmentLoaders[type],
27651 mediaType = settings.mediaTypes[type],
27652 blacklistCurrentPlaylist = settings.blacklistCurrentPlaylist;
27653 stopLoaders(segmentLoader, mediaType); // switch back to default audio track
27654
27655 var activeTrack = mediaType.activeTrack();
27656 var activeGroup = mediaType.activeGroup();
27657 var id = (activeGroup.filter(function (group) {
27658 return group.default;
27659 })[0] || activeGroup[0]).id;
27660 var defaultTrack = mediaType.tracks[id];
27661
27662 if (activeTrack === defaultTrack) {
27663 // Default track encountered an error. All we can do now is blacklist the current
27664 // rendition and hope another will switch audio groups
27665 blacklistCurrentPlaylist({
27666 message: 'Problem encountered loading the default audio track.'
27667 });
27668 return;
27669 }
27670
27671 videojs__default["default"].log.warn('Problem encountered loading the alternate audio track. ' + 'Switching back to default.');
27672
27673 for (var trackId in mediaType.tracks) {
27674 mediaType.tracks[trackId].enabled = mediaType.tracks[trackId] === defaultTrack;
27675 }
27676
27677 mediaType.onTrackChanged();
27678 };
27679 },
27680
27681 /**
27682 * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
27683 * an error.
27684 *
27685 * @param {string} type
27686 * MediaGroup type
27687 * @param {Object} settings
27688 * Object containing required information for media groups
27689 * @return {Function}
27690 * Error handler. Logs warning to console and disables the active subtitle track
27691 * @function onError.SUBTITLES
27692 */
27693 SUBTITLES: function SUBTITLES(type, settings) {
27694 return function () {
27695 var segmentLoader = settings.segmentLoaders[type],
27696 mediaType = settings.mediaTypes[type];
27697 videojs__default["default"].log.warn('Problem encountered loading the subtitle track. ' + 'Disabling subtitle track.');
27698 stopLoaders(segmentLoader, mediaType);
27699 var track = mediaType.activeTrack();
27700
27701 if (track) {
27702 track.mode = 'disabled';
27703 }
27704
27705 mediaType.onTrackChanged();
27706 };
27707 }
27708 };
27709 var setupListeners = {
27710 /**
27711 * Setup event listeners for audio playlist loader
27712 *
27713 * @param {string} type
27714 * MediaGroup type
27715 * @param {PlaylistLoader|null} playlistLoader
27716 * PlaylistLoader to register listeners on
27717 * @param {Object} settings
27718 * Object containing required information for media groups
27719 * @function setupListeners.AUDIO
27720 */
27721 AUDIO: function AUDIO(type, playlistLoader, settings) {
27722 if (!playlistLoader) {
27723 // no playlist loader means audio will be muxed with the video
27724 return;
27725 }
27726
27727 var tech = settings.tech,
27728 requestOptions = settings.requestOptions,
27729 segmentLoader = settings.segmentLoaders[type];
27730 playlistLoader.on('loadedmetadata', function () {
27731 var media = playlistLoader.media();
27732 segmentLoader.playlist(media, requestOptions); // if the video is already playing, or if this isn't a live video and preload
27733 // permits, start downloading segments
27734
27735 if (!tech.paused() || media.endList && tech.preload() !== 'none') {
27736 segmentLoader.load();
27737 }
27738 });
27739 playlistLoader.on('loadedplaylist', function () {
27740 segmentLoader.playlist(playlistLoader.media(), requestOptions); // If the player isn't paused, ensure that the segment loader is running
27741
27742 if (!tech.paused()) {
27743 segmentLoader.load();
27744 }
27745 });
27746 playlistLoader.on('error', onError[type](type, settings));
27747 },
27748
27749 /**
27750 * Setup event listeners for subtitle playlist loader
27751 *
27752 * @param {string} type
27753 * MediaGroup type
27754 * @param {PlaylistLoader|null} playlistLoader
27755 * PlaylistLoader to register listeners on
27756 * @param {Object} settings
27757 * Object containing required information for media groups
27758 * @function setupListeners.SUBTITLES
27759 */
27760 SUBTITLES: function SUBTITLES(type, playlistLoader, settings) {
27761 var tech = settings.tech,
27762 requestOptions = settings.requestOptions,
27763 segmentLoader = settings.segmentLoaders[type],
27764 mediaType = settings.mediaTypes[type];
27765 playlistLoader.on('loadedmetadata', function () {
27766 var media = playlistLoader.media();
27767 segmentLoader.playlist(media, requestOptions);
27768 segmentLoader.track(mediaType.activeTrack()); // if the video is already playing, or if this isn't a live video and preload
27769 // permits, start downloading segments
27770
27771 if (!tech.paused() || media.endList && tech.preload() !== 'none') {
27772 segmentLoader.load();
27773 }
27774 });
27775 playlistLoader.on('loadedplaylist', function () {
27776 segmentLoader.playlist(playlistLoader.media(), requestOptions); // If the player isn't paused, ensure that the segment loader is running
27777
27778 if (!tech.paused()) {
27779 segmentLoader.load();
27780 }
27781 });
27782 playlistLoader.on('error', onError[type](type, settings));
27783 }
27784 };
27785 var initialize = {
27786 /**
27787 * Setup PlaylistLoaders and AudioTracks for the audio groups
27788 *
27789 * @param {string} type
27790 * MediaGroup type
27791 * @param {Object} settings
27792 * Object containing required information for media groups
27793 * @function initialize.AUDIO
27794 */
27795 'AUDIO': function AUDIO(type, settings) {
27796 var vhs = settings.vhs,
27797 sourceType = settings.sourceType,
27798 segmentLoader = settings.segmentLoaders[type],
27799 requestOptions = settings.requestOptions,
27800 mediaGroups = settings.master.mediaGroups,
27801 _settings$mediaTypes$ = settings.mediaTypes[type],
27802 groups = _settings$mediaTypes$.groups,
27803 tracks = _settings$mediaTypes$.tracks,
27804 logger_ = _settings$mediaTypes$.logger_,
27805 masterPlaylistLoader = settings.masterPlaylistLoader;
27806 var audioOnlyMaster = isAudioOnly(masterPlaylistLoader.master); // force a default if we have none
27807
27808 if (!mediaGroups[type] || Object.keys(mediaGroups[type]).length === 0) {
27809 mediaGroups[type] = {
27810 main: {
27811 default: {
27812 default: true
27813 }
27814 }
27815 };
27816
27817 if (audioOnlyMaster) {
27818 mediaGroups[type].main.default.playlists = masterPlaylistLoader.master.playlists;
27819 }
27820 }
27821
27822 for (var groupId in mediaGroups[type]) {
27823 if (!groups[groupId]) {
27824 groups[groupId] = [];
27825 }
27826
27827 for (var variantLabel in mediaGroups[type][groupId]) {
27828 var properties = mediaGroups[type][groupId][variantLabel];
27829 var playlistLoader = void 0;
27830
27831 if (audioOnlyMaster) {
27832 logger_("AUDIO group '" + groupId + "' label '" + variantLabel + "' is a master playlist");
27833 properties.isMasterPlaylist = true;
27834 playlistLoader = null; // if vhs-json was provided as the source, and the media playlist was resolved,
27835 // use the resolved media playlist object
27836 } else if (sourceType === 'vhs-json' && properties.playlists) {
27837 playlistLoader = new PlaylistLoader(properties.playlists[0], vhs, requestOptions);
27838 } else if (properties.resolvedUri) {
27839 playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions); // TODO: dash isn't the only type with properties.playlists
27840 // should we even have properties.playlists in this check?
27841 } else if (properties.playlists && sourceType === 'dash') {
27842 playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, masterPlaylistLoader);
27843 } else {
27844 // no resolvedUri means the audio is muxed with the video when using this
27845 // audio track
27846 playlistLoader = null;
27847 }
27848
27849 properties = videojs__default["default"].mergeOptions({
27850 id: variantLabel,
27851 playlistLoader: playlistLoader
27852 }, properties);
27853 setupListeners[type](type, properties.playlistLoader, settings);
27854 groups[groupId].push(properties);
27855
27856 if (typeof tracks[variantLabel] === 'undefined') {
27857 var track = new videojs__default["default"].AudioTrack({
27858 id: variantLabel,
27859 kind: audioTrackKind_(properties),
27860 enabled: false,
27861 language: properties.language,
27862 default: properties.default,
27863 label: variantLabel
27864 });
27865 tracks[variantLabel] = track;
27866 }
27867 }
27868 } // setup single error event handler for the segment loader
27869
27870
27871 segmentLoader.on('error', onError[type](type, settings));
27872 },
27873
27874 /**
27875 * Setup PlaylistLoaders and TextTracks for the subtitle groups
27876 *
27877 * @param {string} type
27878 * MediaGroup type
27879 * @param {Object} settings
27880 * Object containing required information for media groups
27881 * @function initialize.SUBTITLES
27882 */
27883 'SUBTITLES': function SUBTITLES(type, settings) {
27884 var tech = settings.tech,
27885 vhs = settings.vhs,
27886 sourceType = settings.sourceType,
27887 segmentLoader = settings.segmentLoaders[type],
27888 requestOptions = settings.requestOptions,
27889 mediaGroups = settings.master.mediaGroups,
27890 _settings$mediaTypes$2 = settings.mediaTypes[type],
27891 groups = _settings$mediaTypes$2.groups,
27892 tracks = _settings$mediaTypes$2.tracks,
27893 masterPlaylistLoader = settings.masterPlaylistLoader;
27894
27895 for (var groupId in mediaGroups[type]) {
27896 if (!groups[groupId]) {
27897 groups[groupId] = [];
27898 }
27899
27900 for (var variantLabel in mediaGroups[type][groupId]) {
27901 if (mediaGroups[type][groupId][variantLabel].forced) {
27902 // Subtitle playlists with the forced attribute are not selectable in Safari.
27903 // According to Apple's HLS Authoring Specification:
27904 // If content has forced subtitles and regular subtitles in a given language,
27905 // the regular subtitles track in that language MUST contain both the forced
27906 // subtitles and the regular subtitles for that language.
27907 // Because of this requirement and that Safari does not add forced subtitles,
27908 // forced subtitles are skipped here to maintain consistent experience across
27909 // all platforms
27910 continue;
27911 }
27912
27913 var properties = mediaGroups[type][groupId][variantLabel];
27914 var playlistLoader = void 0;
27915
27916 if (sourceType === 'hls') {
27917 playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions);
27918 } else if (sourceType === 'dash') {
27919 var playlists = properties.playlists.filter(function (p) {
27920 return p.excludeUntil !== Infinity;
27921 });
27922
27923 if (!playlists.length) {
27924 return;
27925 }
27926
27927 playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, masterPlaylistLoader);
27928 } else if (sourceType === 'vhs-json') {
27929 playlistLoader = new PlaylistLoader( // if the vhs-json object included the media playlist, use the media playlist
27930 // as provided, otherwise use the resolved URI to load the playlist
27931 properties.playlists ? properties.playlists[0] : properties.resolvedUri, vhs, requestOptions);
27932 }
27933
27934 properties = videojs__default["default"].mergeOptions({
27935 id: variantLabel,
27936 playlistLoader: playlistLoader
27937 }, properties);
27938 setupListeners[type](type, properties.playlistLoader, settings);
27939 groups[groupId].push(properties);
27940
27941 if (typeof tracks[variantLabel] === 'undefined') {
27942 var track = tech.addRemoteTextTrack({
27943 id: variantLabel,
27944 kind: 'subtitles',
27945 default: properties.default && properties.autoselect,
27946 language: properties.language,
27947 label: variantLabel
27948 }, false).track;
27949 tracks[variantLabel] = track;
27950 }
27951 }
27952 } // setup single error event handler for the segment loader
27953
27954
27955 segmentLoader.on('error', onError[type](type, settings));
27956 },
27957
27958 /**
27959 * Setup TextTracks for the closed-caption groups
27960 *
27961 * @param {String} type
27962 * MediaGroup type
27963 * @param {Object} settings
27964 * Object containing required information for media groups
27965 * @function initialize['CLOSED-CAPTIONS']
27966 */
27967 'CLOSED-CAPTIONS': function CLOSEDCAPTIONS(type, settings) {
27968 var tech = settings.tech,
27969 mediaGroups = settings.master.mediaGroups,
27970 _settings$mediaTypes$3 = settings.mediaTypes[type],
27971 groups = _settings$mediaTypes$3.groups,
27972 tracks = _settings$mediaTypes$3.tracks;
27973
27974 for (var groupId in mediaGroups[type]) {
27975 if (!groups[groupId]) {
27976 groups[groupId] = [];
27977 }
27978
27979 for (var variantLabel in mediaGroups[type][groupId]) {
27980 var properties = mediaGroups[type][groupId][variantLabel]; // Look for either 608 (CCn) or 708 (SERVICEn) caption services
27981
27982 if (!/^(?:CC|SERVICE)/.test(properties.instreamId)) {
27983 continue;
27984 }
27985
27986 var captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};
27987 var newProps = {
27988 label: variantLabel,
27989 language: properties.language,
27990 instreamId: properties.instreamId,
27991 default: properties.default && properties.autoselect
27992 };
27993
27994 if (captionServices[newProps.instreamId]) {
27995 newProps = videojs__default["default"].mergeOptions(newProps, captionServices[newProps.instreamId]);
27996 }
27997
27998 if (newProps.default === undefined) {
27999 delete newProps.default;
28000 } // No PlaylistLoader is required for Closed-Captions because the captions are
28001 // embedded within the video stream
28002
28003
28004 groups[groupId].push(videojs__default["default"].mergeOptions({
28005 id: variantLabel
28006 }, properties));
28007
28008 if (typeof tracks[variantLabel] === 'undefined') {
28009 var track = tech.addRemoteTextTrack({
28010 id: newProps.instreamId,
28011 kind: 'captions',
28012 default: newProps.default,
28013 language: newProps.language,
28014 label: newProps.label
28015 }, false).track;
28016 tracks[variantLabel] = track;
28017 }
28018 }
28019 }
28020 }
28021 };
28022
28023 var groupMatch = function groupMatch(list, media) {
28024 for (var i = 0; i < list.length; i++) {
28025 if (playlistMatch(media, list[i])) {
28026 return true;
28027 }
28028
28029 if (list[i].playlists && groupMatch(list[i].playlists, media)) {
28030 return true;
28031 }
28032 }
28033
28034 return false;
28035 };
28036 /**
28037 * Returns a function used to get the active group of the provided type
28038 *
28039 * @param {string} type
28040 * MediaGroup type
28041 * @param {Object} settings
28042 * Object containing required information for media groups
28043 * @return {Function}
28044 * Function that returns the active media group for the provided type. Takes an
28045 * optional parameter {TextTrack} track. If no track is provided, a list of all
28046 * variants in the group is returned; otherwise, the variant corresponding to the
28047 * provided track is returned.
28048 * @function activeGroup
28049 */
28050
28051
28052 var activeGroup = function activeGroup(type, settings) {
28053 return function (track) {
28054 var masterPlaylistLoader = settings.masterPlaylistLoader,
28055 groups = settings.mediaTypes[type].groups;
28056 var media = masterPlaylistLoader.media();
28057
28058 if (!media) {
28059 return null;
28060 }
28061
28062 var variants = null; // set variants to the main media active group
28063
28064 if (media.attributes[type]) {
28065 variants = groups[media.attributes[type]];
28066 }
28067
28068 var groupKeys = Object.keys(groups);
28069
28070 if (!variants) {
28071 // find the masterPlaylistLoader media
28072 // that is in a media group if we are dealing
28073 // with audio only
28074 if (type === 'AUDIO' && groupKeys.length > 1 && isAudioOnly(settings.master)) {
28075 for (var i = 0; i < groupKeys.length; i++) {
28076 var groupPropertyList = groups[groupKeys[i]];
28077
28078 if (groupMatch(groupPropertyList, media)) {
28079 variants = groupPropertyList;
28080 break;
28081 }
28082 } // use the main group if it exists
28083
28084 } else if (groups.main) {
28085 variants = groups.main; // only one group, use that one
28086 } else if (groupKeys.length === 1) {
28087 variants = groups[groupKeys[0]];
28088 }
28089 }
28090
28091 if (typeof track === 'undefined') {
28092 return variants;
28093 }
28094
28095 if (track === null || !variants) {
28096 // An active track was specified so a corresponding group is expected. track === null
28097 // means no track is currently active so there is no corresponding group
28098 return null;
28099 }
28100
28101 return variants.filter(function (props) {
28102 return props.id === track.id;
28103 })[0] || null;
28104 };
28105 };
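  // Resolution sketch (illustrative; the 'aud1' group id is hypothetical): when
  // the active media playlist has attributes.AUDIO === 'aud1', the getter returns
  // groups.aud1; passing a track narrows that list to the single variant whose id
  // matches track.id. Once wired up in setupMediaGroups below:
  //
  //   mediaTypes.AUDIO.activeGroup();      // all variants in group 'aud1'
  //   mediaTypes.AUDIO.activeGroup(track); // the one variant with props.id === track.id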
28106 var activeTrack = {
28107 /**
28108 * Returns a function used to get the active track of type provided
28109 *
28110 * @param {string} type
28111 * MediaGroup type
28112 * @param {Object} settings
28113 * Object containing required information for media groups
28114 * @return {Function}
28115 * Function that returns the active media track for the provided type. Returns
28116 * null if no track is active
28117 * @function activeTrack.AUDIO
28118 */
28119 AUDIO: function AUDIO(type, settings) {
28120 return function () {
28121 var tracks = settings.mediaTypes[type].tracks;
28122
28123 for (var id in tracks) {
28124 if (tracks[id].enabled) {
28125 return tracks[id];
28126 }
28127 }
28128
28129 return null;
28130 };
28131 },
28132
28133 /**
28134 * Returns a function used to get the active track of type provided
28135 *
28136 * @param {string} type
28137 * MediaGroup type
28138 * @param {Object} settings
28139 * Object containing required information for media groups
28140 * @return {Function}
28141 * Function that returns the active media track for the provided type. Returns
28142 * null if no track is active
28143 * @function activeTrack.SUBTITLES
28144 */
28145 SUBTITLES: function SUBTITLES(type, settings) {
28146 return function () {
28147 var tracks = settings.mediaTypes[type].tracks;
28148
28149 for (var id in tracks) {
28150 if (tracks[id].mode === 'showing' || tracks[id].mode === 'hidden') {
28151 return tracks[id];
28152 }
28153 }
28154
28155 return null;
28156 };
28157 }
28158 };
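// --- Editor's sketch (not part of the library): the AUDIO getter keys off
// `enabled`, while the SUBTITLES getter keys off a `mode` of 'showing' or
// 'hidden'. Plain objects stand in for real AudioTrack/TextTrack instances;
// this function is never invoked.
var exampleActiveTrackUsage = function () {
  var settings = {
    mediaTypes: {
      AUDIO: { tracks: { en: { id: 'en', enabled: true } } },
      SUBTITLES: { tracks: { de: { id: 'de', mode: 'showing' } } }
    }
  };
  var getAudioTrack = activeTrack.AUDIO('AUDIO', settings);
  var getSubtitleTrack = activeTrack.SUBTITLES('SUBTITLES', settings);
  return [getAudioTrack().id, getSubtitleTrack().id]; // ['en', 'de']
};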
28159 var getActiveGroup = function getActiveGroup(type, _ref) {
28160 var mediaTypes = _ref.mediaTypes;
28161 return function () {
28162 var activeTrack_ = mediaTypes[type].activeTrack();
28163
28164 if (!activeTrack_) {
28165 return null;
28166 }
28167
28168 return mediaTypes[type].activeGroup(activeTrack_);
28169 };
28170 };
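// --- Editor's sketch (not part of the library): getActiveGroup simply
// composes the two getters above -- resolve the active track first, then look
// up that track's entry in the active group. Never invoked here.
var exampleGetActiveGroup = function (mediaTypes) {
  var getGroup = getActiveGroup('AUDIO', { mediaTypes: mediaTypes });
  return getGroup(); // null whenever no AUDIO track is currently enabled
};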
28171 /**
28172 * Setup PlaylistLoaders and Tracks for media groups (Audio, Subtitles,
28173 * Closed-Captions) specified in the master manifest.
28174 *
28175 * @param {Object} settings
28176 * Object containing required information for setting up the media groups
28177 * @param {Tech} settings.tech
28178 * The tech of the player
28179 * @param {Object} settings.requestOptions
28180 * XHR request options used by the segment loaders
28181 * @param {PlaylistLoader} settings.masterPlaylistLoader
28182 * PlaylistLoader for the master source
28183 * @param {VhsHandler} settings.vhs
28184 * VHS SourceHandler
28185 * @param {Object} settings.master
28186 * The parsed master manifest
28187 * @param {Object} settings.mediaTypes
28188 * Object to store the loaders, tracks, and utility methods for each media type
28189 * @param {Function} settings.blacklistCurrentPlaylist
28190 * Blacklists the current rendition and forces a rendition switch.
28191 * @function setupMediaGroups
28192 */
28193
28194 var setupMediaGroups = function setupMediaGroups(settings) {
28195 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
28196 initialize[type](type, settings);
28197 });
28198 var mediaTypes = settings.mediaTypes,
28199 masterPlaylistLoader = settings.masterPlaylistLoader,
28200 tech = settings.tech,
28201 vhs = settings.vhs,
28202 _settings$segmentLoad3 = settings.segmentLoaders,
28203 audioSegmentLoader = _settings$segmentLoad3['AUDIO'],
28204 mainSegmentLoader = _settings$segmentLoad3.main; // setup active group and track getters and change event handlers
28205
28206 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
28207 mediaTypes[type].activeGroup = activeGroup(type, settings);
28208 mediaTypes[type].activeTrack = activeTrack[type](type, settings);
28209 mediaTypes[type].onGroupChanged = onGroupChanged(type, settings);
28210 mediaTypes[type].onGroupChanging = onGroupChanging(type, settings);
28211 mediaTypes[type].onTrackChanged = onTrackChanged(type, settings);
28212 mediaTypes[type].getActiveGroup = getActiveGroup(type, settings);
28213 }); // DO NOT enable the default subtitle or caption track.
28214 // DO enable the default audio track
28215
28216 var audioGroup = mediaTypes.AUDIO.activeGroup();
28217
28218 if (audioGroup) {
28219 var groupId = (audioGroup.filter(function (group) {
28220 return group.default;
28221 })[0] || audioGroup[0]).id;
28222 mediaTypes.AUDIO.tracks[groupId].enabled = true;
28223 mediaTypes.AUDIO.onGroupChanged();
28224 mediaTypes.AUDIO.onTrackChanged();
28225 var activeAudioGroup = mediaTypes.AUDIO.getActiveGroup(); // a similar check for handling setAudio on each loader is run again each time the
28226 // track is changed, but needs to be handled here since the track may not be considered
28227 // changed on the first call to onTrackChanged
28228
28229 if (!activeAudioGroup.playlistLoader) {
28230 // either audio is muxed with video or the stream is audio only
28231 mainSegmentLoader.setAudio(true);
28232 } else {
28233 // audio is demuxed
28234 mainSegmentLoader.setAudio(false);
28235 audioSegmentLoader.setAudio(true);
28236 }
28237 }
28238
28239 masterPlaylistLoader.on('mediachange', function () {
28240 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
28241 return mediaTypes[type].onGroupChanged();
28242 });
28243 });
28244 masterPlaylistLoader.on('mediachanging', function () {
28245 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
28246 return mediaTypes[type].onGroupChanging();
28247 });
28248 }); // custom audio track change event handler for usage event
28249
28250 var onAudioTrackChanged = function onAudioTrackChanged() {
28251 mediaTypes.AUDIO.onTrackChanged();
28252 tech.trigger({
28253 type: 'usage',
28254 name: 'vhs-audio-change'
28255 });
28256 tech.trigger({
28257 type: 'usage',
28258 name: 'hls-audio-change'
28259 });
28260 };
28261
28262 tech.audioTracks().addEventListener('change', onAudioTrackChanged);
28263 tech.remoteTextTracks().addEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
28264 vhs.on('dispose', function () {
28265 tech.audioTracks().removeEventListener('change', onAudioTrackChanged);
28266 tech.remoteTextTracks().removeEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
28267 }); // clear existing audio tracks and add the ones we just created
28268
28269 tech.clearTracks('audio');
28270
28271 for (var id in mediaTypes.AUDIO.tracks) {
28272 tech.audioTracks().addTrack(mediaTypes.AUDIO.tracks[id]);
28273 }
28274 };
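// --- Editor's sketch (not part of the library): the muxed/demuxed decision in
// setupMediaGroups above reduces to whether the active audio group entry
// carries its own playlistLoader. Extracted here as a standalone illustration;
// the parameter names are hypothetical and this function is never invoked.
var exampleSetAudioRouting = function (activeAudioGroup, mainLoader, audioLoader) {
  if (!activeAudioGroup.playlistLoader) {
    // audio is muxed with video, or the stream is audio only
    mainLoader.setAudio(true);
  } else {
    // audio is demuxed into its own segment loader
    mainLoader.setAudio(false);
    audioLoader.setAudio(true);
  }
};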
28275 /**
28276 * Creates skeleton object used to store the loaders, tracks, and utility methods for each
28277 * media type
28278 *
28279 * @return {Object}
28280 * Object to store the loaders, tracks, and utility methods for each media type
28281 * @function createMediaTypes
28282 */
28283
28284 var createMediaTypes = function createMediaTypes() {
28285 var mediaTypes = {};
28286 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
28287 mediaTypes[type] = {
28288 groups: {},
28289 tracks: {},
28290 activePlaylistLoader: null,
28291 activeGroup: noop,
28292 activeTrack: noop,
28293 getActiveGroup: noop,
28294 onGroupChanged: noop,
28295 onTrackChanged: noop,
28296 lastTrack_: null,
28297 logger_: logger("MediaGroups[" + type + "]")
28298 };
28299 });
28300 return mediaTypes;
28301 };
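// --- Editor's note: the skeleton produced by createMediaTypes looks like the
// following for each of the three types; every callable starts as a no-op
// until setupMediaGroups wires in the real implementations:
//
//   mediaTypes.AUDIO = {
//     groups: {},               // variant lists keyed by group id
//     tracks: {},               // AudioTrack/TextTrack objects keyed by id
//     activePlaylistLoader: null,
//     activeGroup: noop, activeTrack: noop, getActiveGroup: noop,
//     onGroupChanged: noop, onTrackChanged: noop,
//     lastTrack_: null,
//     logger_: logger('MediaGroups[AUDIO]')
//   };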
28302
28303 var ABORT_EARLY_BLACKLIST_SECONDS = 60 * 2;
28304 var Vhs$1; // SegmentLoader stats that need to have each loader's
28305 // values summed to calculate the final value
28306
28307 var loaderStats = ['mediaRequests', 'mediaRequestsAborted', 'mediaRequestsTimedout', 'mediaRequestsErrored', 'mediaTransferDuration', 'mediaBytesTransferred', 'mediaAppends'];
28308
28309 var sumLoaderStat = function sumLoaderStat(stat) {
28310 return this.audioSegmentLoader_[stat] + this.mainSegmentLoader_[stat];
28311 };
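// --- Editor's sketch (not part of the library): sumLoaderStat is bound per
// stat name in the MasterPlaylistController constructor, yielding getters such
// as mpc.mediaBytesTransferred_(). `mpc` is a hypothetical controller instance
// that already has its segment loaders; never invoked here.
var exampleBindLoaderStats = function (mpc) {
  loaderStats.forEach(function (stat) {
    mpc[stat + '_'] = sumLoaderStat.bind(mpc, stat);
  });
  // each getter returns the audio-loader value plus the main-loader value
  return mpc.mediaRequests_();
};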
28312
28313 var shouldSwitchToMedia = function shouldSwitchToMedia(_ref) {
28314 var currentPlaylist = _ref.currentPlaylist,
28315 buffered = _ref.buffered,
28316 currentTime = _ref.currentTime,
28317 nextPlaylist = _ref.nextPlaylist,
28318 bufferLowWaterLine = _ref.bufferLowWaterLine,
28319 bufferHighWaterLine = _ref.bufferHighWaterLine,
28320 duration = _ref.duration,
28321 experimentalBufferBasedABR = _ref.experimentalBufferBasedABR,
28322 log = _ref.log;
28323
28324 // we have no other playlist to switch to
28325 if (!nextPlaylist) {
28326 videojs__default["default"].log.warn('We received no playlist to switch to. Please check your stream.');
28327 return false;
28328 }
28329
28330 var sharedLogLine = "allowing switch " + (currentPlaylist && currentPlaylist.id || 'null') + " -> " + nextPlaylist.id;
28331
28332 if (!currentPlaylist) {
28333 log(sharedLogLine + " as current playlist is not set");
28334 return true;
28335 } // no need to switch if playlist is the same
28336
28337
28338 if (nextPlaylist.id === currentPlaylist.id) {
28339 return false;
28340 } // determine if current time is in a buffered range.
28341
28342
28343 var isBuffered = Boolean(findRange(buffered, currentTime).length); // If the playlist is live, then we don't want to take the low water line into account.
28344 // This is because in LIVE, the player plays 3 segments from the end of the
28345 // playlist, and if `BUFFER_LOW_WATER_LINE` is greater than the duration available
28346 // in those segments, a viewer will never experience a rendition upswitch.
28347
28348 if (!currentPlaylist.endList) {
28349 // For LLHLS live streams, don't switch renditions before playback has started, as it almost
28350 // doubles the time to first playback.
28351 if (!isBuffered && typeof currentPlaylist.partTargetDuration === 'number') {
28352 log("not " + sharedLogLine + " as current playlist is live llhls, but currentTime isn't in buffered.");
28353 return false;
28354 }
28355
28356 log(sharedLogLine + " as current playlist is live");
28357 return true;
28358 }
28359
28360 var forwardBuffer = timeAheadOf(buffered, currentTime);
28361 var maxBufferLowWaterLine = experimentalBufferBasedABR ? Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE : Config.MAX_BUFFER_LOW_WATER_LINE; // For the same reason as LIVE, we ignore the low water line when the VOD
28362 // duration is below the max potential low water line
28363
28364 if (duration < maxBufferLowWaterLine) {
28365 log(sharedLogLine + " as duration < max low water line (" + duration + " < " + maxBufferLowWaterLine + ")");
28366 return true;
28367 }
28368
28369 var nextBandwidth = nextPlaylist.attributes.BANDWIDTH;
28370 var currBandwidth = currentPlaylist.attributes.BANDWIDTH; // when switching down, if our buffer is lower than the high water line,
28371 // we can switch down
28372
28373 if (nextBandwidth < currBandwidth && (!experimentalBufferBasedABR || forwardBuffer < bufferHighWaterLine)) {
28374 var logLine = sharedLogLine + " as next bandwidth < current bandwidth (" + nextBandwidth + " < " + currBandwidth + ")";
28375
28376 if (experimentalBufferBasedABR) {
28377 logLine += " and forwardBuffer < bufferHighWaterLine (" + forwardBuffer + " < " + bufferHighWaterLine + ")";
28378 }
28379
28380 log(logLine);
28381 return true;
28382 } // and if our buffer is higher than the low water line,
28383 // we can switch up
28384
28385
28386 if ((!experimentalBufferBasedABR || nextBandwidth > currBandwidth) && forwardBuffer >= bufferLowWaterLine) {
28387 var _logLine = sharedLogLine + " as forwardBuffer >= bufferLowWaterLine (" + forwardBuffer + " >= " + bufferLowWaterLine + ")";
28388
28389 if (experimentalBufferBasedABR) {
28390 _logLine += " and next bandwidth > current bandwidth (" + nextBandwidth + " > " + currBandwidth + ")";
28391 }
28392
28393 log(_logLine);
28394 return true;
28395 }
28396
28397 log("not " + sharedLogLine + " as no switching criteria met");
28398 return false;
28399 };
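// --- Editor's sketch (not part of the library, hypothetical numbers): a
// down-switch decision for a 600s VOD with the playhead inside the buffer.
// Because the candidate's BANDWIDTH is lower and buffer-based ABR is off, the
// low water line is not consulted and the switch is allowed. Never invoked.
var exampleDownSwitchDecision = function (log) {
  return shouldSwitchToMedia({
    currentPlaylist: { id: 'playlist-4mbps', endList: true, attributes: { BANDWIDTH: 4e6 } },
    nextPlaylist: { id: 'playlist-2mbps', attributes: { BANDWIDTH: 2e6 } },
    buffered: videojs__default["default"].createTimeRanges([[0, 20]]),
    currentTime: 10,
    bufferLowWaterLine: 30,
    bufferHighWaterLine: 30,
    duration: 600,
    experimentalBufferBasedABR: false,
    log: log
  }); // true -- "next bandwidth < current bandwidth (2000000 < 4000000)"
};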
28400 /**
28401 * The master playlist controller controls all interactions
28402 * between playlists and segment loaders. At this time this mainly
28403 * involves a master playlist and a series of audio playlists
28404 * if they are available.
28405 *
28406 * @class MasterPlaylistController
28407 * @extends videojs.EventTarget
28408 */
28409
28410
28411 var MasterPlaylistController = /*#__PURE__*/function (_videojs$EventTarget) {
28412 inheritsLoose(MasterPlaylistController, _videojs$EventTarget);
28413
28414 function MasterPlaylistController(options) {
28415 var _this;
28416
28417 _this = _videojs$EventTarget.call(this) || this;
28418 var src = options.src,
28419 handleManifestRedirects = options.handleManifestRedirects,
28420 withCredentials = options.withCredentials,
28421 tech = options.tech,
28422 bandwidth = options.bandwidth,
28423 externVhs = options.externVhs,
28424 useCueTags = options.useCueTags,
28425 blacklistDuration = options.blacklistDuration,
28426 enableLowInitialPlaylist = options.enableLowInitialPlaylist,
28427 sourceType = options.sourceType,
28428 cacheEncryptionKeys = options.cacheEncryptionKeys,
28429 experimentalBufferBasedABR = options.experimentalBufferBasedABR,
28430 experimentalLeastPixelDiffSelector = options.experimentalLeastPixelDiffSelector,
28431 captionServices = options.captionServices;
28432
28433 if (!src) {
28434 throw new Error('A non-empty playlist URL or JSON manifest string is required');
28435 }
28436
28437 var maxPlaylistRetries = options.maxPlaylistRetries;
28438
28439 if (maxPlaylistRetries === null || typeof maxPlaylistRetries === 'undefined') {
28440 maxPlaylistRetries = Infinity;
28441 }
28442
28443 Vhs$1 = externVhs;
28444 _this.experimentalBufferBasedABR = Boolean(experimentalBufferBasedABR);
28445 _this.experimentalLeastPixelDiffSelector = Boolean(experimentalLeastPixelDiffSelector);
28446 _this.withCredentials = withCredentials;
28447 _this.tech_ = tech;
28448 _this.vhs_ = tech.vhs;
28449 _this.sourceType_ = sourceType;
28450 _this.useCueTags_ = useCueTags;
28451 _this.blacklistDuration = blacklistDuration;
28452 _this.maxPlaylistRetries = maxPlaylistRetries;
28453 _this.enableLowInitialPlaylist = enableLowInitialPlaylist;
28454
28455 if (_this.useCueTags_) {
28456 _this.cueTagsTrack_ = _this.tech_.addTextTrack('metadata', 'ad-cues');
28457 _this.cueTagsTrack_.inBandMetadataTrackDispatchType = '';
28458 }
28459
28460 _this.requestOptions_ = {
28461 withCredentials: withCredentials,
28462 handleManifestRedirects: handleManifestRedirects,
28463 maxPlaylistRetries: maxPlaylistRetries,
28464 timeout: null
28465 };
28466
28467 _this.on('error', _this.pauseLoading);
28468
28469 _this.mediaTypes_ = createMediaTypes();
28470 _this.mediaSource = new window.MediaSource();
28471 _this.handleDurationChange_ = _this.handleDurationChange_.bind(assertThisInitialized(_this));
28472 _this.handleSourceOpen_ = _this.handleSourceOpen_.bind(assertThisInitialized(_this));
28473 _this.handleSourceEnded_ = _this.handleSourceEnded_.bind(assertThisInitialized(_this));
28474
28475 _this.mediaSource.addEventListener('durationchange', _this.handleDurationChange_); // load the media source into the player
28476
28477
28478 _this.mediaSource.addEventListener('sourceopen', _this.handleSourceOpen_);
28479
28480 _this.mediaSource.addEventListener('sourceended', _this.handleSourceEnded_); // we don't have to handle sourceclose since dispose will handle termination of
28481 // everything, and the MediaSource should not be detached without a proper disposal
28482
28483
28484 _this.seekable_ = videojs__default["default"].createTimeRanges();
28485 _this.hasPlayed_ = false;
28486 _this.syncController_ = new SyncController(options);
28487 _this.segmentMetadataTrack_ = tech.addRemoteTextTrack({
28488 kind: 'metadata',
28489 label: 'segment-metadata'
28490 }, false).track;
28491 _this.decrypter_ = new Decrypter();
28492 _this.sourceUpdater_ = new SourceUpdater(_this.mediaSource);
28493 _this.inbandTextTracks_ = {};
28494 _this.timelineChangeController_ = new TimelineChangeController();
28495 var segmentLoaderSettings = {
28496 vhs: _this.vhs_,
28497 parse708captions: options.parse708captions,
28498 useDtsForTimestampOffset: options.useDtsForTimestampOffset,
28499 captionServices: captionServices,
28500 mediaSource: _this.mediaSource,
28501 currentTime: _this.tech_.currentTime.bind(_this.tech_),
28502 seekable: function seekable() {
28503 return _this.seekable();
28504 },
28505 seeking: function seeking() {
28506 return _this.tech_.seeking();
28507 },
28508 duration: function duration() {
28509 return _this.duration();
28510 },
28511 hasPlayed: function hasPlayed() {
28512 return _this.hasPlayed_;
28513 },
28514 goalBufferLength: function goalBufferLength() {
28515 return _this.goalBufferLength();
28516 },
28517 bandwidth: bandwidth,
28518 syncController: _this.syncController_,
28519 decrypter: _this.decrypter_,
28520 sourceType: _this.sourceType_,
28521 inbandTextTracks: _this.inbandTextTracks_,
28522 cacheEncryptionKeys: cacheEncryptionKeys,
28523 sourceUpdater: _this.sourceUpdater_,
28524 timelineChangeController: _this.timelineChangeController_,
28525 experimentalExactManifestTimings: options.experimentalExactManifestTimings
28526 }; // The source type check not only determines whether a special DASH playlist loader
28527 // should be used, but also covers the case where the provided src is a vhs-json
28528 // manifest object (instead of a URL). In the case of vhs-json, the default
28529 // PlaylistLoader should be used.
28530
28531 _this.masterPlaylistLoader_ = _this.sourceType_ === 'dash' ? new DashPlaylistLoader(src, _this.vhs_, _this.requestOptions_) : new PlaylistLoader(src, _this.vhs_, _this.requestOptions_);
28532
28533 _this.setupMasterPlaylistLoaderListeners_(); // setup segment loaders
28534 // combined audio/video or just video when alternate audio track is selected
28535
28536
28537 _this.mainSegmentLoader_ = new SegmentLoader(videojs__default["default"].mergeOptions(segmentLoaderSettings, {
28538 segmentMetadataTrack: _this.segmentMetadataTrack_,
28539 loaderType: 'main'
28540 }), options); // alternate audio track
28541
28542 _this.audioSegmentLoader_ = new SegmentLoader(videojs__default["default"].mergeOptions(segmentLoaderSettings, {
28543 loaderType: 'audio'
28544 }), options);
28545 _this.subtitleSegmentLoader_ = new VTTSegmentLoader(videojs__default["default"].mergeOptions(segmentLoaderSettings, {
28546 loaderType: 'vtt',
28547 featuresNativeTextTracks: _this.tech_.featuresNativeTextTracks,
28548 loadVttJs: function loadVttJs() {
28549 return new Promise(function (resolve, reject) {
28550 function onLoad() {
28551 tech.off('vttjserror', onError);
28552 resolve();
28553 }
28554
28555 function onError() {
28556 tech.off('vttjsloaded', onLoad);
28557 reject();
28558 }
28559
28560 tech.one('vttjsloaded', onLoad);
28561 tech.one('vttjserror', onError); // safe to call multiple times, script will be loaded only once:
28562
28563 tech.addWebVttScript_();
28564 });
28565 }
28566 }), options);
28567
28568 _this.setupSegmentLoaderListeners_();
28569
28570 if (_this.experimentalBufferBasedABR) {
28571 _this.masterPlaylistLoader_.one('loadedplaylist', function () {
28572 return _this.startABRTimer_();
28573 });
28574
28575 _this.tech_.on('pause', function () {
28576 return _this.stopABRTimer_();
28577 });
28578
28579 _this.tech_.on('play', function () {
28580 return _this.startABRTimer_();
28581 });
28582 } // Create SegmentLoader stat-getters
28583 // mediaRequests_
28584 // mediaRequestsAborted_
28585 // mediaRequestsTimedout_
28586 // mediaRequestsErrored_
28587 // mediaTransferDuration_
28588 // mediaBytesTransferred_
28589 // mediaAppends_
28590
28591
28592 loaderStats.forEach(function (stat) {
28593 _this[stat + '_'] = sumLoaderStat.bind(assertThisInitialized(_this), stat);
28594 });
28595 _this.logger_ = logger('MPC');
28596 _this.triggeredFmp4Usage = false;
28597
28598 if (_this.tech_.preload() === 'none') {
28599 _this.loadOnPlay_ = function () {
28600 _this.loadOnPlay_ = null;
28601
28602 _this.masterPlaylistLoader_.load();
28603 };
28604
28605 _this.tech_.one('play', _this.loadOnPlay_);
28606 } else {
28607 _this.masterPlaylistLoader_.load();
28608 }
28609
28610 _this.timeToLoadedData__ = -1;
28611 _this.mainAppendsToLoadedData__ = -1;
28612 _this.audioAppendsToLoadedData__ = -1;
28613 var event = _this.tech_.preload() === 'none' ? 'play' : 'loadstart'; // start the first frame timer on loadstart or play (for preload none)
28614
28615 _this.tech_.one(event, function () {
28616 var timeToLoadedDataStart = Date.now();
28617
28618 _this.tech_.one('loadeddata', function () {
28619 _this.timeToLoadedData__ = Date.now() - timeToLoadedDataStart;
28620 _this.mainAppendsToLoadedData__ = _this.mainSegmentLoader_.mediaAppends;
28621 _this.audioAppendsToLoadedData__ = _this.audioSegmentLoader_.mediaAppends;
28622 });
28623 });
28624
28625 return _this;
28626 }
28627
28628 var _proto = MasterPlaylistController.prototype;
28629
28630 _proto.mainAppendsToLoadedData_ = function mainAppendsToLoadedData_() {
28631 return this.mainAppendsToLoadedData__;
28632 };
28633
28634 _proto.audioAppendsToLoadedData_ = function audioAppendsToLoadedData_() {
28635 return this.audioAppendsToLoadedData__;
28636 };
28637
28638 _proto.appendsToLoadedData_ = function appendsToLoadedData_() {
28639 var main = this.mainAppendsToLoadedData_();
28640 var audio = this.audioAppendsToLoadedData_();
28641
28642 if (main === -1 || audio === -1) {
28643 return -1;
28644 }
28645
28646 return main + audio;
28647 };
28648
28649 _proto.timeToLoadedData_ = function timeToLoadedData_() {
28650 return this.timeToLoadedData__;
28651 }
28652 /**
28653 * Run selectPlaylist and switch to the new playlist if we should
28654 *
28655 * @param {string} [reason=abr] a reason for why the ABR check is made
28656 * @private
28657 */
28658 ;
28659
28660 _proto.checkABR_ = function checkABR_(reason) {
28661 if (reason === void 0) {
28662 reason = 'abr';
28663 }
28664
28665 var nextPlaylist = this.selectPlaylist();
28666
28667 if (nextPlaylist && this.shouldSwitchToMedia_(nextPlaylist)) {
28668 this.switchMedia_(nextPlaylist, reason);
28669 }
28670 };
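// --- Editor's sketch (not part of the library): checkABR_ is the single ABR
// entry point, driven both by the 250ms timer below and by 'bandwidthupdate'
// events. This shows overriding selectPlaylist with a toy lowest-bandwidth
// selector and invoking checkABR_ by hand; `mpc` is a hypothetical controller
// instance and this function is never invoked here.
var exampleManualAbrCheck = function (mpc) {
  mpc.selectPlaylist = function () {
    return mpc.master().playlists.slice().sort(function (a, b) {
      return a.attributes.BANDWIDTH - b.attributes.BANDWIDTH;
    })[0];
  };
  mpc.checkABR_('manual'); // switches only if shouldSwitchToMedia_ approves
};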
28671
28672 _proto.switchMedia_ = function switchMedia_(playlist, cause, delay) {
28673 var oldMedia = this.media();
28674 var oldId = oldMedia && (oldMedia.id || oldMedia.uri);
28675 var newId = playlist.id || playlist.uri;
28676
28677 if (oldId && oldId !== newId) {
28678 this.logger_("switch media " + oldId + " -> " + newId + " from " + cause);
28679 this.tech_.trigger({
28680 type: 'usage',
28681 name: "vhs-rendition-change-" + cause
28682 });
28683 }
28684
28685 this.masterPlaylistLoader_.media(playlist, delay);
28686 }
28687 /**
28688 * Start a timer that periodically calls checkABR_
28689 *
28690 * @private
28691 */
28692 ;
28693
28694 _proto.startABRTimer_ = function startABRTimer_() {
28695 var _this2 = this;
28696
28697 this.stopABRTimer_();
28698 this.abrTimer_ = window.setInterval(function () {
28699 return _this2.checkABR_();
28700 }, 250);
28701 }
28702 /**
28703 * Stop the timer that periodically calls checkABR_
28704 *
28705 * @private
28706 */
28707 ;
28708
28709 _proto.stopABRTimer_ = function stopABRTimer_() {
28710 // if we're scrubbing, we don't need to pause.
28711 // This getter will be added to Video.js in version 7.11.
28712 if (this.tech_.scrubbing && this.tech_.scrubbing()) {
28713 return;
28714 }
28715
28716 window.clearInterval(this.abrTimer_);
28717 this.abrTimer_ = null;
28718 }
28719 /**
28720 * Get a list of playlists for the currently selected audio playlist
28721 *
28722 * @return {Array} the array of audio playlists
28723 */
28724 ;
28725
28726 _proto.getAudioTrackPlaylists_ = function getAudioTrackPlaylists_() {
28727 var master = this.master();
28728 var defaultPlaylists = master && master.playlists || []; // if we don't have any audio groups then we can only
28729 // assume that the audio tracks are contained in the master's
28730 // playlists array; use that or an empty array.
28731
28732 if (!master || !master.mediaGroups || !master.mediaGroups.AUDIO) {
28733 return defaultPlaylists;
28734 }
28735
28736 var AUDIO = master.mediaGroups.AUDIO;
28737 var groupKeys = Object.keys(AUDIO);
28738 var track; // get the current active track
28739
28740 if (Object.keys(this.mediaTypes_.AUDIO.groups).length) {
28741 track = this.mediaTypes_.AUDIO.activeTrack(); // or get the default track from master if mediaTypes_ isn't setup yet
28742 } else {
28743 // default group is `main` or just the first group.
28744 var defaultGroup = AUDIO.main || groupKeys.length && AUDIO[groupKeys[0]];
28745
28746 for (var label in defaultGroup) {
28747 if (defaultGroup[label].default) {
28748 track = {
28749 label: label
28750 };
28751 break;
28752 }
28753 }
28754 } // no active track means no playlists.
28755
28756
28757 if (!track) {
28758 return defaultPlaylists;
28759 }
28760
28761 var playlists = []; // get all of the playlists that are possible for the
28762 // active track.
28763
28764 for (var group in AUDIO) {
28765 if (AUDIO[group][track.label]) {
28766 var properties = AUDIO[group][track.label];
28767
28768 if (properties.playlists && properties.playlists.length) {
28769 playlists.push.apply(playlists, properties.playlists);
28770 } else if (properties.uri) {
28771 playlists.push(properties);
28772 } else if (master.playlists.length) {
28773 // if an audio group does not have a uri
28774 // see if we have main playlists that use it as a group.
28775 // if we do then add those to the playlists list.
28776 for (var i = 0; i < master.playlists.length; i++) {
28777 var playlist = master.playlists[i];
28778
28779 if (playlist.attributes && playlist.attributes.AUDIO && playlist.attributes.AUDIO === group) {
28780 playlists.push(playlist);
28781 }
28782 }
28783 }
28784 }
28785 }
28786
28787 if (!playlists.length) {
28788 return defaultPlaylists;
28789 }
28790
28791 return playlists;
28792 }
28793 /**
28794 * Register event handlers on the master playlist loader. A helper
28795 * function for construction time.
28796 *
28797 * @private
28798 */
28799 ;
28800
28801 _proto.setupMasterPlaylistLoaderListeners_ = function setupMasterPlaylistLoaderListeners_() {
28802 var _this3 = this;
28803
28804 this.masterPlaylistLoader_.on('loadedmetadata', function () {
28805 var media = _this3.masterPlaylistLoader_.media();
28806
28807 var requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to
28808 // time out the request.
28809
28810 if (isLowestEnabledRendition(_this3.masterPlaylistLoader_.master, _this3.masterPlaylistLoader_.media())) {
28811 _this3.requestOptions_.timeout = 0;
28812 } else {
28813 _this3.requestOptions_.timeout = requestTimeout;
28814 } // if this isn't a live video and preload permits, start
28815 // downloading segments
28816
28817
28818 if (media.endList && _this3.tech_.preload() !== 'none') {
28819 _this3.mainSegmentLoader_.playlist(media, _this3.requestOptions_);
28820
28821 _this3.mainSegmentLoader_.load();
28822 }
28823
28824 setupMediaGroups({
28825 sourceType: _this3.sourceType_,
28826 segmentLoaders: {
28827 AUDIO: _this3.audioSegmentLoader_,
28828 SUBTITLES: _this3.subtitleSegmentLoader_,
28829 main: _this3.mainSegmentLoader_
28830 },
28831 tech: _this3.tech_,
28832 requestOptions: _this3.requestOptions_,
28833 masterPlaylistLoader: _this3.masterPlaylistLoader_,
28834 vhs: _this3.vhs_,
28835 master: _this3.master(),
28836 mediaTypes: _this3.mediaTypes_,
28837 blacklistCurrentPlaylist: _this3.blacklistCurrentPlaylist.bind(_this3)
28838 });
28839
28840 _this3.triggerPresenceUsage_(_this3.master(), media);
28841
28842 _this3.setupFirstPlay();
28843
28844 if (!_this3.mediaTypes_.AUDIO.activePlaylistLoader || _this3.mediaTypes_.AUDIO.activePlaylistLoader.media()) {
28845 _this3.trigger('selectedinitialmedia');
28846 } else {
28847 // We must wait for the active audio playlist loader to
28848 // finish setting up before triggering this event so the
28849 // representations API and EME setup is correct
28850 _this3.mediaTypes_.AUDIO.activePlaylistLoader.one('loadedmetadata', function () {
28851 _this3.trigger('selectedinitialmedia');
28852 });
28853 }
28854 });
28855 this.masterPlaylistLoader_.on('loadedplaylist', function () {
28856 if (_this3.loadOnPlay_) {
28857 _this3.tech_.off('play', _this3.loadOnPlay_);
28858 }
28859
28860 var updatedPlaylist = _this3.masterPlaylistLoader_.media();
28861
28862 if (!updatedPlaylist) {
28863 // exclude any variants that are not supported by the browser before selecting
28864 // an initial media as the playlist selectors do not consider browser support
28865 _this3.excludeUnsupportedVariants_();
28866
28867 var selectedMedia;
28868
28869 if (_this3.enableLowInitialPlaylist) {
28870 selectedMedia = _this3.selectInitialPlaylist();
28871 }
28872
28873 if (!selectedMedia) {
28874 selectedMedia = _this3.selectPlaylist();
28875 }
28876
28877 if (!selectedMedia || !_this3.shouldSwitchToMedia_(selectedMedia)) {
28878 return;
28879 }
28880
28881 _this3.initialMedia_ = selectedMedia;
28882
28883 _this3.switchMedia_(_this3.initialMedia_, 'initial'); // Under the standard case where a source URL is provided, loadedplaylist will
28884 // fire again since the playlist will be requested. In the case of vhs-json
28885 // (where the manifest object is provided as the source), when the media
28886 // playlist's `segments` list is already available, a media playlist won't be
28887 // requested, and loadedplaylist won't fire again, so the playlist handler must be
28888 // called on its own here.
28889
28890
28891 var haveJsonSource = _this3.sourceType_ === 'vhs-json' && _this3.initialMedia_.segments;
28892
28893 if (!haveJsonSource) {
28894 return;
28895 }
28896
28897 updatedPlaylist = _this3.initialMedia_;
28898 }
28899
28900 _this3.handleUpdatedMediaPlaylist(updatedPlaylist);
28901 });
28902 this.masterPlaylistLoader_.on('error', function () {
28903 _this3.blacklistCurrentPlaylist(_this3.masterPlaylistLoader_.error);
28904 });
28905 this.masterPlaylistLoader_.on('mediachanging', function () {
28906 _this3.mainSegmentLoader_.abort();
28907
28908 _this3.mainSegmentLoader_.pause();
28909 });
28910 this.masterPlaylistLoader_.on('mediachange', function () {
28911 var media = _this3.masterPlaylistLoader_.media();
28912
28913 var requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to
28914 // time out the request.
28915
28916 if (isLowestEnabledRendition(_this3.masterPlaylistLoader_.master, _this3.masterPlaylistLoader_.media())) {
28917 _this3.requestOptions_.timeout = 0;
28918 } else {
28919 _this3.requestOptions_.timeout = requestTimeout;
28920 }
28921
28922 _this3.masterPlaylistLoader_.load(); // TODO: Create a new event on the PlaylistLoader that signals
28923 // that the segments have changed in some way and use that to
28924 // update the SegmentLoader instead of doing it twice here and
28925 // on `loadedplaylist`
28926
28927
28928 _this3.mainSegmentLoader_.playlist(media, _this3.requestOptions_);
28929
28930 _this3.mainSegmentLoader_.load();
28931
28932 _this3.tech_.trigger({
28933 type: 'mediachange',
28934 bubbles: true
28935 });
28936 });
28937 this.masterPlaylistLoader_.on('playlistunchanged', function () {
28938 var updatedPlaylist = _this3.masterPlaylistLoader_.media(); // ignore unchanged playlists that have already been
28939 // excluded for not-changing. We likely just have a really slowly updating
28940 // playlist.
28941
28942
28943 if (updatedPlaylist.lastExcludeReason_ === 'playlist-unchanged') {
28944 return;
28945 }
28946
28947 var playlistOutdated = _this3.stuckAtPlaylistEnd_(updatedPlaylist);
28948
28949 if (playlistOutdated) {
28950 // Playlist has stopped updating and we're stuck at its end. Try to
28951 // blacklist it and switch to another playlist in the hope that that
28952 // one is updating (and give the player a chance to re-adjust to the
28953 // safe live point).
28954 _this3.blacklistCurrentPlaylist({
28955 message: 'Playlist no longer updating.',
28956 reason: 'playlist-unchanged'
28957 }); // useful for monitoring QoS
28958
28959
28960 _this3.tech_.trigger('playliststuck');
28961 }
28962 });
28963 this.masterPlaylistLoader_.on('renditiondisabled', function () {
28964 _this3.tech_.trigger({
28965 type: 'usage',
28966 name: 'vhs-rendition-disabled'
28967 });
28968
28969 _this3.tech_.trigger({
28970 type: 'usage',
28971 name: 'hls-rendition-disabled'
28972 });
28973 });
28974 this.masterPlaylistLoader_.on('renditionenabled', function () {
28975 _this3.tech_.trigger({
28976 type: 'usage',
28977 name: 'vhs-rendition-enabled'
28978 });
28979
28980 _this3.tech_.trigger({
28981 type: 'usage',
28982 name: 'hls-rendition-enabled'
28983 });
28984 });
28985 }
28986 /**
28987 * Given an updated media playlist (whether it was loaded for the first time, or
28988 * refreshed for live playlists), update any relevant properties and state to reflect
28989 * changes in the media that should be accounted for (e.g., cues and duration).
28990 *
28991 * @param {Object} updatedPlaylist the updated media playlist object
28992 *
28993 * @private
28994 */
28995 ;
28996
28997 _proto.handleUpdatedMediaPlaylist = function handleUpdatedMediaPlaylist(updatedPlaylist) {
28998 if (this.useCueTags_) {
28999 this.updateAdCues_(updatedPlaylist);
29000 } // TODO: Create a new event on the PlaylistLoader that signals
29001 // that the segments have changed in some way and use that to
29002 // update the SegmentLoader instead of doing it twice here and
29003 // on `mediachange`
29004
29005
29006 this.mainSegmentLoader_.playlist(updatedPlaylist, this.requestOptions_);
29007 this.updateDuration(!updatedPlaylist.endList); // If the player isn't paused, ensure that the segment loader is running,
29008 // as it is possible that it was temporarily stopped while waiting for
29009 // a playlist (e.g., in case the playlist errored and we re-requested it).
29010
29011 if (!this.tech_.paused()) {
29012 this.mainSegmentLoader_.load();
29013
29014 if (this.audioSegmentLoader_) {
29015 this.audioSegmentLoader_.load();
29016 }
29017 }
29018 }
29019 /**
29020 * A helper function for triggering presence usage events once per source
29021 *
29022 * @private
29023 */
29024 ;
29025
29026 _proto.triggerPresenceUsage_ = function triggerPresenceUsage_(master, media) {
29027 var mediaGroups = master.mediaGroups || {};
29028 var defaultDemuxed = true;
29029 var audioGroupKeys = Object.keys(mediaGroups.AUDIO);
29030
29031 for (var mediaGroup in mediaGroups.AUDIO) {
29032 for (var label in mediaGroups.AUDIO[mediaGroup]) {
29033 var properties = mediaGroups.AUDIO[mediaGroup][label];
29034
29035 if (!properties.uri) {
29036 defaultDemuxed = false;
29037 }
29038 }
29039 }
29040
29041 if (defaultDemuxed) {
29042 this.tech_.trigger({
29043 type: 'usage',
29044 name: 'vhs-demuxed'
29045 });
29046 this.tech_.trigger({
29047 type: 'usage',
29048 name: 'hls-demuxed'
29049 });
29050 }
29051
29052 if (Object.keys(mediaGroups.SUBTITLES).length) {
29053 this.tech_.trigger({
29054 type: 'usage',
29055 name: 'vhs-webvtt'
29056 });
29057 this.tech_.trigger({
29058 type: 'usage',
29059 name: 'hls-webvtt'
29060 });
29061 }
29062
29063 if (Vhs$1.Playlist.isAes(media)) {
29064 this.tech_.trigger({
29065 type: 'usage',
29066 name: 'vhs-aes'
29067 });
29068 this.tech_.trigger({
29069 type: 'usage',
29070 name: 'hls-aes'
29071 });
29072 }
29073
29074 if (audioGroupKeys.length && Object.keys(mediaGroups.AUDIO[audioGroupKeys[0]]).length > 1) {
29075 this.tech_.trigger({
29076 type: 'usage',
29077 name: 'vhs-alternate-audio'
29078 });
29079 this.tech_.trigger({
29080 type: 'usage',
29081 name: 'hls-alternate-audio'
29082 });
29083 }
29084
29085 if (this.useCueTags_) {
29086 this.tech_.trigger({
29087 type: 'usage',
29088 name: 'vhs-playlist-cue-tags'
29089 });
29090 this.tech_.trigger({
29091 type: 'usage',
29092 name: 'hls-playlist-cue-tags'
29093 });
29094 }
29095 };
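// --- Editor's sketch (not part of the library): the usage events triggered
// above (and throughout this file) can be observed on the tech for
// lightweight feature/QoS telemetry. `tech` is a hypothetical tech instance;
// never invoked here.
var exampleUsageTelemetry = function (tech) {
  tech.on('usage', function (event) {
    if (/^vhs-/.test(event.name)) {
      console.log('VHS feature in use:', event.name);
    }
  });
};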
29096
29097 _proto.shouldSwitchToMedia_ = function shouldSwitchToMedia_(nextPlaylist) {
29098 var currentPlaylist = this.masterPlaylistLoader_.media() || this.masterPlaylistLoader_.pendingMedia_;
29099 var currentTime = this.tech_.currentTime();
29100 var bufferLowWaterLine = this.bufferLowWaterLine();
29101 var bufferHighWaterLine = this.bufferHighWaterLine();
29102 var buffered = this.tech_.buffered();
29103 return shouldSwitchToMedia({
29104 buffered: buffered,
29105 currentTime: currentTime,
29106 currentPlaylist: currentPlaylist,
29107 nextPlaylist: nextPlaylist,
29108 bufferLowWaterLine: bufferLowWaterLine,
29109 bufferHighWaterLine: bufferHighWaterLine,
29110 duration: this.duration(),
29111 experimentalBufferBasedABR: this.experimentalBufferBasedABR,
29112 log: this.logger_
29113 });
29114 }
29115 /**
29116 * Register event handlers on the segment loaders. A helper function
29117 * for construction time.
29118 *
29119 * @private
29120 */
29121 ;
29122
29123 _proto.setupSegmentLoaderListeners_ = function setupSegmentLoaderListeners_() {
29124 var _this4 = this;
29125
29126 this.mainSegmentLoader_.on('bandwidthupdate', function () {
29127 // Whichever ABR strategy is in use (buffer-based or otherwise), a bandwidth
29128 // change is a useful time to check whether a rendition switch should be made.
29129 _this4.checkABR_('bandwidthupdate');
29130
29131 _this4.tech_.trigger('bandwidthupdate');
29132 });
29133 this.mainSegmentLoader_.on('timeout', function () {
29134 if (_this4.experimentalBufferBasedABR) {
29135 // If a rendition change is needed, then it would've been done on `bandwidthupdate`.
29136 // Here the only consideration is that for buffer based ABR there's no guarantee
29137 // of an immediate switch (since the bandwidth is averaged with a timeout
29138 // bandwidth value of 1), so force a load on the segment loader to keep it going.
29139 _this4.mainSegmentLoader_.load();
29140 }
29141 }); // `progress` events are not a reliable enough bandwidth measure to trigger
29142 // buffer-based ABR.
29143
29144 if (!this.experimentalBufferBasedABR) {
29145 this.mainSegmentLoader_.on('progress', function () {
29146 _this4.trigger('progress');
29147 });
29148 }
29149
29150 this.mainSegmentLoader_.on('error', function () {
29151 _this4.blacklistCurrentPlaylist(_this4.mainSegmentLoader_.error());
29152 });
29153 this.mainSegmentLoader_.on('appenderror', function () {
29154 _this4.error = _this4.mainSegmentLoader_.error_;
29155
29156 _this4.trigger('error');
29157 });
29158 this.mainSegmentLoader_.on('syncinfoupdate', function () {
29159 _this4.onSyncInfoUpdate_();
29160 });
29161 this.mainSegmentLoader_.on('timestampoffset', function () {
29162 _this4.tech_.trigger({
29163 type: 'usage',
29164 name: 'vhs-timestamp-offset'
29165 });
29166
29167 _this4.tech_.trigger({
29168 type: 'usage',
29169 name: 'hls-timestamp-offset'
29170 });
29171 });
29172 this.audioSegmentLoader_.on('syncinfoupdate', function () {
29173 _this4.onSyncInfoUpdate_();
29174 });
29175 this.audioSegmentLoader_.on('appenderror', function () {
29176 _this4.error = _this4.audioSegmentLoader_.error_;
29177
29178 _this4.trigger('error');
29179 });
29180 this.mainSegmentLoader_.on('ended', function () {
29181 _this4.logger_('main segment loader ended');
29182
29183 _this4.onEndOfStream();
29184 });
29185 this.mainSegmentLoader_.on('earlyabort', function (event) {
29186 // never try to early abort with the new ABR algorithm
29187 if (_this4.experimentalBufferBasedABR) {
29188 return;
29189 }
29190
29191 _this4.delegateLoaders_('all', ['abort']);
29192
29193 _this4.blacklistCurrentPlaylist({
29194 message: 'Aborted early because there isn\'t enough bandwidth to complete the ' + 'request without rebuffering.'
29195 }, ABORT_EARLY_BLACKLIST_SECONDS);
29196 });
29197
29198 var updateCodecs = function updateCodecs() {
29199 if (!_this4.sourceUpdater_.hasCreatedSourceBuffers()) {
29200 return _this4.tryToCreateSourceBuffers_();
29201 }
29202
29203 var codecs = _this4.getCodecsOrExclude_(); // no codecs means that the playlist was excluded
29204
29205
29206 if (!codecs) {
29207 return;
29208 }
29209
29210 _this4.sourceUpdater_.addOrChangeSourceBuffers(codecs);
29211 };
29212
29213 this.mainSegmentLoader_.on('trackinfo', updateCodecs);
29214 this.audioSegmentLoader_.on('trackinfo', updateCodecs);
29215 this.mainSegmentLoader_.on('fmp4', function () {
29216 if (!_this4.triggeredFmp4Usage) {
29217 _this4.tech_.trigger({
29218 type: 'usage',
29219 name: 'vhs-fmp4'
29220 });
29221
29222 _this4.tech_.trigger({
29223 type: 'usage',
29224 name: 'hls-fmp4'
29225 });
29226
29227 _this4.triggeredFmp4Usage = true;
29228 }
29229 });
29230 this.audioSegmentLoader_.on('fmp4', function () {
29231 if (!_this4.triggeredFmp4Usage) {
29232 _this4.tech_.trigger({
29233 type: 'usage',
29234 name: 'vhs-fmp4'
29235 });
29236
29237 _this4.tech_.trigger({
29238 type: 'usage',
29239 name: 'hls-fmp4'
29240 });
29241
29242 _this4.triggeredFmp4Usage = true;
29243 }
29244 });
29245 this.audioSegmentLoader_.on('ended', function () {
29246 _this4.logger_('audioSegmentLoader ended');
29247
29248 _this4.onEndOfStream();
29249 });
29250 };
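// --- Editor's sketch (not part of the library): 'bandwidthupdate' is
// re-triggered on the tech above, so throughput changes can be observed
// without reaching into the segment loaders. `tech` and `vhs` are
// hypothetical references to the tech and the VHS source handler, and the
// `vhs.bandwidth` property is an assumption; never invoked here.
var exampleBandwidthProbe = function (tech, vhs) {
  tech.on('bandwidthupdate', function () {
    console.log('current bandwidth estimate (bits/sec):', vhs.bandwidth);
  });
};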
29251
29252 _proto.mediaSecondsLoaded_ = function mediaSecondsLoaded_() {
29253 return this.audioSegmentLoader_.mediaSecondsLoaded + this.mainSegmentLoader_.mediaSecondsLoaded; // total across both loaders
29254 }
29255 /**
29256 * Call load on our SegmentLoaders
29257 */
29258 ;
29259
29260 _proto.load = function load() {
29261 this.mainSegmentLoader_.load();
29262
29263 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
29264 this.audioSegmentLoader_.load();
29265 }
29266
29267 if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
29268 this.subtitleSegmentLoader_.load();
29269 }
29270 }
29271 /**
29272 * Re-tune playback quality level for the current player
29273 * conditions without performing destructive actions, like
29274 * removing already buffered content
29275 *
29276 * @private
29277 * @deprecated
29278 */
29279 ;
29280
29281 _proto.smoothQualityChange_ = function smoothQualityChange_(media) {
29282 if (media === void 0) {
29283 media = this.selectPlaylist();
29284 }
29285
29286 this.fastQualityChange_(media);
29287 }
29288 /**
29289 * Re-tune playback quality level for the current player
29290 * conditions. This method will perform destructive actions like removing
29291 * already buffered content in order to readjust the currently active
29292 * playlist quickly. This is good for manual quality changes
29293 *
29294 * @private
29295 */
29296 ;
29297
29298 _proto.fastQualityChange_ = function fastQualityChange_(media) {
29299 var _this5 = this;
29300
29301 if (media === void 0) {
29302 media = this.selectPlaylist();
29303 }
29304
29305 if (media === this.masterPlaylistLoader_.media()) {
29306 this.logger_('skipping fastQualityChange because new media is same as old');
29307 return;
29308 }
29309
29310 this.switchMedia_(media, 'fast-quality'); // Delete all buffered data to allow an immediate quality switch, then seek to give
29311 // the browser a kick to remove any cached frames from the previous rendition (.04 seconds
29312 // ahead is roughly the minimum that will accomplish this across a variety of content
29313 // in IE and Edge, but seeking in place is sufficient on all other browsers)
29314 // Edge/IE bug: https://developer.microsoft.com/en-us/microsoft-edge/platform/issues/14600375/
29315 // Chrome bug: https://bugs.chromium.org/p/chromium/issues/detail?id=651904
29316
29317 this.mainSegmentLoader_.resetEverything(function () {
29318 // Since this is not a typical seek, we avoid the seekTo method which can cause segments
29319 // from the previously enabled rendition to load before the new playlist has finished loading
29320 if (videojs__default["default"].browser.IE_VERSION || videojs__default["default"].browser.IS_EDGE) {
29321 _this5.tech_.setCurrentTime(_this5.tech_.currentTime() + 0.04);
29322 } else {
29323 _this5.tech_.setCurrentTime(_this5.tech_.currentTime());
29324 }
29325 }); // don't need to reset audio as it is reset when media changes
29326 }
29327 /**
29328 * Begin playback.
29329 */
29330 ;
29331
29332 _proto.play = function play() {
29333 if (this.setupFirstPlay()) {
29334 return;
29335 }
29336
29337 if (this.tech_.ended()) {
29338 this.tech_.setCurrentTime(0);
29339 }
29340
29341 if (this.hasPlayed_) {
29342 this.load();
29343 }
29344
29345 var seekable = this.tech_.seekable(); // if the viewer has paused and we fell out of the live window,
29346 // seek forward to the live point
29347
29348 if (this.tech_.duration() === Infinity) {
29349 if (this.tech_.currentTime() < seekable.start(0)) {
29350 return this.tech_.setCurrentTime(seekable.end(seekable.length - 1));
29351 }
29352 }
29353 }
29354 /**
29355 * Seek to the latest media position if this is a live video and the
29356 * player and video are loaded and initialized.
29357 */
29358 ;
29359
29360 _proto.setupFirstPlay = function setupFirstPlay() {
29361 var _this6 = this;
29362
29363 var media = this.masterPlaylistLoader_.media(); // Check that everything is ready to begin buffering for the first call to play
29364 // If 1) there is no active media
29365 // 2) the player is paused
29366 // 3) the first play has already been setup
29367 // then exit early
29368
29369 if (!media || this.tech_.paused() || this.hasPlayed_) {
29370 return false;
29371 } // when the video is a live stream
29372
29373
29374 if (!media.endList) {
29375 var seekable = this.seekable();
29376
29377 if (!seekable.length) {
29378 // without a seekable range, the player cannot seek to begin buffering at the live
29379 // point
29380 return false;
29381 }
29382
29383 if (videojs__default["default"].browser.IE_VERSION && this.tech_.readyState() === 0) {
29384 // IE11 throws an InvalidStateError if you try to set currentTime while the
29385 // readyState is 0, so it must be delayed until the tech fires loadedmetadata.
29386 this.tech_.one('loadedmetadata', function () {
29387 _this6.trigger('firstplay');
29388
29389 _this6.tech_.setCurrentTime(seekable.end(0));
29390
29391 _this6.hasPlayed_ = true;
29392 });
29393 return false;
29394 } // trigger firstplay to inform the source handler to ignore the next seek event
29395
29396
29397 this.trigger('firstplay'); // seek to the live point
29398
29399 this.tech_.setCurrentTime(seekable.end(0));
29400 }
29401
29402 this.hasPlayed_ = true; // we can begin loading now that everything is ready
29403
29404 this.load();
29405 return true;
29406 }
29407 /**
29408 * handle the sourceopen event on the MediaSource
29409 *
29410 * @private
29411 */
29412 ;
29413
29414 _proto.handleSourceOpen_ = function handleSourceOpen_() {
29415 // Only attempt to create the source buffer if none already exist.
29416 // handleSourceOpen is also called when we are "re-opening" a source buffer
29417 // after `endOfStream` has been called (in response to a seek for instance)
29418 this.tryToCreateSourceBuffers_(); // if autoplay is enabled, begin playback. This is duplicative of
29419 // code in video.js but is required because play() must be invoked
29420 // *after* the media source has opened.
29421
29422 if (this.tech_.autoplay()) {
29423 var playPromise = this.tech_.play(); // Catch/silence error when a pause interrupts a play request
29424 // on browsers which return a promise
29425
29426 if (typeof playPromise !== 'undefined' && typeof playPromise.then === 'function') {
29427 playPromise.then(null, function (e) {});
29428 }
29429 }
29430
29431 this.trigger('sourceopen');
29432 }
29433 /**
29434 * handle the sourceended event on the MediaSource
29435 *
29436 * @private
29437 */
29438 ;
29439
29440 _proto.handleSourceEnded_ = function handleSourceEnded_() {
29441 if (!this.inbandTextTracks_.metadataTrack_) {
29442 return;
29443 }
29444
29445 var cues = this.inbandTextTracks_.metadataTrack_.cues;
29446
29447 if (!cues || !cues.length) {
29448 return;
29449 }
29450
29451 var duration = this.duration();
29452 cues[cues.length - 1].endTime = isNaN(duration) || Math.abs(duration) === Infinity ? Number.MAX_VALUE : duration;
29453 }
29454 /**
29455 * handle the durationchange event on the MediaSource
29456 *
29457 * @private
29458 */
29459 ;
29460
29461 _proto.handleDurationChange_ = function handleDurationChange_() {
29462 this.tech_.trigger('durationchange');
29463 }
29464 /**
29465 * Calls endOfStream on the media source when all active stream types have called
29466 * endOfStream
29467 *
29468 * @param {string} streamType
29469 * Stream type of the segment loader that called endOfStream
29470 * @private
29471 */
29472 ;
29473
29474 _proto.onEndOfStream = function onEndOfStream() {
29475 var isEndOfStream = this.mainSegmentLoader_.ended_;
29476
29477 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
29478 var mainMediaInfo = this.mainSegmentLoader_.getCurrentMediaInfo_(); // if the audio playlist loader exists, then alternate audio is active
29479
29480 if (!mainMediaInfo || mainMediaInfo.hasVideo) {
29481 // if we do not know if the main segment loader contains video yet or if we
29482 // definitively know the main segment loader contains video, then we need to wait
29483 // for both main and audio segment loaders to call endOfStream
29484 isEndOfStream = isEndOfStream && this.audioSegmentLoader_.ended_;
29485 } else {
29486 // otherwise just rely on the audio loader
29487 isEndOfStream = this.audioSegmentLoader_.ended_;
29488 }
29489 }
29490
29491 if (!isEndOfStream) {
29492 return;
29493 }
29494
29495 this.stopABRTimer_();
29496 this.sourceUpdater_.endOfStream();
29497 }
29498 /**
29499 * Check if a playlist has stopped being updated
29500 *
29501 * @param {Object} playlist the media playlist object
29502 * @return {boolean} whether the playlist has stopped being updated or not
29503 */
29504 ;
29505
29506 _proto.stuckAtPlaylistEnd_ = function stuckAtPlaylistEnd_(playlist) {
29507 var seekable = this.seekable();
29508
29509 if (!seekable.length) {
29510 // playlist doesn't have enough information to determine whether we are stuck
29511 return false;
29512 }
29513
29514 var expired = this.syncController_.getExpiredTime(playlist, this.duration());
29515
29516 if (expired === null) {
29517 return false;
29518 } // does not use the safe live end to calculate playlist end, since we
29519 // don't want to say we are stuck while there is still content
29520
29521
29522 var absolutePlaylistEnd = Vhs$1.Playlist.playlistEnd(playlist, expired);
29523 var currentTime = this.tech_.currentTime();
29524 var buffered = this.tech_.buffered();
29525
29526 if (!buffered.length) {
29527 // return true if the playhead reached the absolute end of the playlist
29528 return absolutePlaylistEnd - currentTime <= SAFE_TIME_DELTA;
29529 }
29530
29531 var bufferedEnd = buffered.end(buffered.length - 1); // return true if there is too little buffer left and buffer has reached absolute
29532 // end of playlist
29533
29534 return bufferedEnd - currentTime <= SAFE_TIME_DELTA && absolutePlaylistEnd - bufferedEnd <= SAFE_TIME_DELTA;
29535 }
29536 /**
29537 * Blacklists a playlist when an error occurs for a set amount of time
29538 * making it unavailable for selection by the rendition selection algorithm
29539 * and then forces a new playlist (rendition) selection.
29540 *
29541 * @param {Object=} error an optional error that may include the playlist
29542 * to blacklist
29543 * @param {number=} blacklistDuration an optional number of seconds to blacklist the
29544 * playlist
29545 */
29546 ;
29547
29548 _proto.blacklistCurrentPlaylist = function blacklistCurrentPlaylist(error, blacklistDuration) {
29549 if (error === void 0) {
29550 error = {};
29551 }
29552
29553 // If the `error` was generated by the playlist loader, it will contain
29554 // the playlist we were trying to load (but failed) and that should be
29555 // blacklisted instead of the currently selected playlist which is likely
29556 // out-of-date in this scenario
29557 var currentPlaylist = error.playlist || this.masterPlaylistLoader_.media();
29558 blacklistDuration = blacklistDuration || error.blacklistDuration || this.blacklistDuration; // If there is no current playlist, then an error occurred while we were
29559 // trying to load the master OR while we were disposing of the tech
29560
29561 if (!currentPlaylist) {
29562 this.error = error;
29563
29564 if (this.mediaSource.readyState !== 'open') {
29565 this.trigger('error');
29566 } else {
29567 this.sourceUpdater_.endOfStream('network');
29568 }
29569
29570 return;
29571 }
29572
29573 currentPlaylist.playlistErrors_++;
29574 var playlists = this.masterPlaylistLoader_.master.playlists;
29575 var enabledPlaylists = playlists.filter(isEnabled);
29576 var isFinalRendition = enabledPlaylists.length === 1 && enabledPlaylists[0] === currentPlaylist; // Don't blacklist the only playlist unless it was blacklisted
29577 // forever
29578
29579 if (playlists.length === 1 && blacklistDuration !== Infinity) {
29580 videojs__default["default"].log.warn("Problem encountered with playlist " + currentPlaylist.id + ". " + 'Trying again since it is the only playlist.');
29581 this.tech_.trigger('retryplaylist'); // if this is a final rendition, we should delay
29582
29583 return this.masterPlaylistLoader_.load(isFinalRendition);
29584 }
29585
29586 if (isFinalRendition) {
29587 // Since we're on the final non-blacklisted playlist, and we're about to blacklist
29588 // it, instead of erring the player or retrying this playlist, clear out the current
29589 // blacklist. This allows other playlists to be attempted in case any have been
29590 // fixed.
29591 var reincluded = false;
29592 playlists.forEach(function (playlist) {
29593 // skip current playlist which is about to be blacklisted
29594 if (playlist === currentPlaylist) {
29595 return;
29596 }
29597
29598 var excludeUntil = playlist.excludeUntil; // a playlist cannot be reincluded if it wasn't excluded to begin with.
29599
29600 if (typeof excludeUntil !== 'undefined' && excludeUntil !== Infinity) {
29601 reincluded = true;
29602 delete playlist.excludeUntil;
29603 }
29604 });
29605
29606 if (reincluded) {
29607 videojs__default["default"].log.warn('Removing other playlists from the exclusion list because the last ' + 'rendition is about to be excluded.'); // Technically we are retrying a playlist, in that we are simply retrying a previous
29608 // playlist. This is needed for users relying on the retryplaylist event to catch a
29609 // case where the player might be stuck and looping through "dead" playlists.
29610
29611 this.tech_.trigger('retryplaylist');
29612 }
29613 } // Blacklist this playlist
29614
29615
29616 var excludeUntil;
29617
29618 if (currentPlaylist.playlistErrors_ > this.maxPlaylistRetries) {
29619 excludeUntil = Infinity;
29620 } else {
29621 excludeUntil = Date.now() + blacklistDuration * 1000;
29622 }
29623
29624 currentPlaylist.excludeUntil = excludeUntil;
29625
29626 if (error.reason) {
29627 currentPlaylist.lastExcludeReason_ = error.reason;
29628 }
29629
29630 this.tech_.trigger('blacklistplaylist');
29631 this.tech_.trigger({
29632 type: 'usage',
29633 name: 'vhs-rendition-blacklisted'
29634 });
29635 this.tech_.trigger({
29636 type: 'usage',
29637 name: 'hls-rendition-blacklisted'
29638 }); // TODO: should we select a new playlist if this blacklist wasn't for the currentPlaylist?
29639 // Would be something like media().id !== currentPlaylist.id and we would need something
29640 // like `pendingMedia` in playlist loaders to check against that too. This will prevent us
29641 // from loading a new playlist on any blacklist.
29642 // Select a new playlist
29643
29644 var nextPlaylist = this.selectPlaylist();
29645
29646 if (!nextPlaylist) {
29647 this.error = 'Playback cannot continue. No available working or supported playlists.';
29648 this.trigger('error');
29649 return;
29650 }
29651
29652 var logFn = error.internal ? this.logger_ : videojs__default["default"].log.warn;
29653 var errorMessage = error.message ? ' ' + error.message : '';
29654 logFn((error.internal ? 'Internal problem' : 'Problem') + " encountered with playlist " + currentPlaylist.id + "." + (errorMessage + " Switching to playlist " + nextPlaylist.id + ".")); // if audio group changed reset audio loaders
29655
29656 if (nextPlaylist.attributes.AUDIO !== currentPlaylist.attributes.AUDIO) {
29657 this.delegateLoaders_('audio', ['abort', 'pause']);
29658 } // if subtitle group changed reset subtitle loaders
29659
29660
29661 if (nextPlaylist.attributes.SUBTITLES !== currentPlaylist.attributes.SUBTITLES) {
29662 this.delegateLoaders_('subtitle', ['abort', 'pause']);
29663 }
29664
29665 this.delegateLoaders_('main', ['abort', 'pause']);
29666 var delayDuration = nextPlaylist.targetDuration / 2 * 1000 || 5 * 1000;
29667 var shouldDelay = typeof nextPlaylist.lastRequest === 'number' && Date.now() - nextPlaylist.lastRequest <= delayDuration; // delay if it's the final rendition or if the last request was less than half the target duration ago
29668
29669 return this.switchMedia_(nextPlaylist, 'exclude', isFinalRendition || shouldDelay);
29670 }
29671 /**
29672 * Pause all segment/playlist loaders
29673 */
29674 ;
29675
29676 _proto.pauseLoading = function pauseLoading() {
29677 this.delegateLoaders_('all', ['abort', 'pause']);
29678 this.stopABRTimer_();
29679 }
29680 /**
29681 * Call a set of functions in order on playlist loaders, segment loaders,
29682 * or both types of loaders.
29683 *
29684 * @param {string} filter
29685 * Filter loaders that should call fnNames using a string. Can be:
29686 * * all - run on all loaders
29687 * * audio - run on all audio loaders
29688 * * subtitle - run on all subtitle loaders
29689 * * main - run on the main/master loaders
29690 *
29691 * @param {Array|string} fnNames
29692 * A string or array of function names to call.
29693 */
29694 ;
29695
29696 _proto.delegateLoaders_ = function delegateLoaders_(filter, fnNames) {
29697 var _this7 = this;
29698
29699 var loaders = [];
29700 var dontFilterPlaylist = filter === 'all';
29701
29702 if (dontFilterPlaylist || filter === 'main') {
29703 loaders.push(this.masterPlaylistLoader_);
29704 }
29705
29706 var mediaTypes = [];
29707
29708 if (dontFilterPlaylist || filter === 'audio') {
29709 mediaTypes.push('AUDIO');
29710 }
29711
29712 if (dontFilterPlaylist || filter === 'subtitle') {
29713 mediaTypes.push('CLOSED-CAPTIONS');
29714 mediaTypes.push('SUBTITLES');
29715 }
29716
29717 mediaTypes.forEach(function (mediaType) {
29718 var loader = _this7.mediaTypes_[mediaType] && _this7.mediaTypes_[mediaType].activePlaylistLoader;
29719
29720 if (loader) {
29721 loaders.push(loader);
29722 }
29723 });
29724 ['main', 'audio', 'subtitle'].forEach(function (name) {
29725 var loader = _this7[name + "SegmentLoader_"];
29726
29727 if (loader && (filter === name || filter === 'all')) {
29728 loaders.push(loader);
29729 }
29730 });
29731 loaders.forEach(function (loader) {
29732 return fnNames.forEach(function (fnName) {
29733 if (typeof loader[fnName] === 'function') {
29734 loader[fnName]();
29735 }
29736 });
29737 });
29738 }
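/*
 * For illustration: given the filter/fnNames contract above, these calls (both of
 * which appear elsewhere in this file) are how the controller pauses everything
 * and how it resets audio loaders on an audio-group change:
 *
 *   this.delegateLoaders_('all', ['abort', 'pause']);   // every playlist + segment loader
 *   this.delegateLoaders_('audio', ['abort', 'pause']); // active audio playlist loader + audio segment loader
 */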
29739 /**
29740 * set the current time on all segment loaders
29741 *
29742 * @param {number} currentTime the current time to set
29743 * @return {number} the current time
29744 */
29745 ;
29746
29747 _proto.setCurrentTime = function setCurrentTime(currentTime) {
29748 var buffered = findRange(this.tech_.buffered(), currentTime);
29749
29750 if (!(this.masterPlaylistLoader_ && this.masterPlaylistLoader_.media())) {
29751 // return immediately if the metadata is not ready yet
29752 return 0;
29753 } // it's clearly an edge case, but don't throw an error if asked to
29754 // seek within an empty playlist
29755
29756
29757 if (!this.masterPlaylistLoader_.media().segments) {
29758 return 0;
29759 } // if the seek location is already buffered, continue buffering as usual
29760
29761
29762 if (buffered && buffered.length) {
29763 return currentTime;
29764 } // cancel outstanding requests so we begin buffering at the new
29765 // location
29766
29767
29768 this.mainSegmentLoader_.resetEverything();
29769 this.mainSegmentLoader_.abort();
29770
29771 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
29772 this.audioSegmentLoader_.resetEverything();
29773 this.audioSegmentLoader_.abort();
29774 }
29775
29776 if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
29777 this.subtitleSegmentLoader_.resetEverything();
29778 this.subtitleSegmentLoader_.abort();
29779 } // start segment loader loading in case they are paused
29780
29781
29782 this.load();
29783 }
29784 /**
29785 * get the current duration
29786 *
29787 * @return {number} the duration
29788 */
29789 ;
29790
29791 _proto.duration = function duration() {
29792 if (!this.masterPlaylistLoader_) {
29793 return 0;
29794 }
29795
29796 var media = this.masterPlaylistLoader_.media();
29797
29798 if (!media) {
29799 // no playlists loaded yet, so can't determine a duration
29800 return 0;
29801 } // Don't rely on the media source for duration in the case of a live playlist since
29802 // setting the native MediaSource's duration to infinity ends up with consequences to
29803 // seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.
29804 //
29805 // This is resolved in the spec by https://github.com/w3c/media-source/pull/92,
29806 // however, few browsers have support for setLiveSeekableRange()
29807 // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
29808 //
29809 // Until a time when the duration of the media source can be set to infinity, and a
29810 // seekable range specified across browsers, just return Infinity.
29811
29812
29813 if (!media.endList) {
29814 return Infinity;
29815 } // Since this is a VOD video, it is safe to rely on the media source's duration (if
29816 // available). If it's not available, fall back to a playlist-calculated estimate.
29817
29818
29819 if (this.mediaSource) {
29820 return this.mediaSource.duration;
29821 }
29822
29823 return Vhs$1.Playlist.duration(media);
29824 }
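/*
 * Practical consequence of the branches above (values are illustrative; `mpc` is
 * a hypothetical reference to this controller): a live playlist (no endList)
 * always reports Infinity, while VOD defers to the media source when one exists:
 *
 *   mpc.duration(); // => Infinity while the playlist is live
 *   mpc.duration(); // => mediaSource.duration (or a playlist estimate) for VOD
 */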
29825 /**
29826 * check the seekable range
29827 *
29828 * @return {TimeRange} the seekable range
29829 */
29830 ;
29831
29832 _proto.seekable = function seekable() {
29833 return this.seekable_;
29834 };
29835
29836 _proto.onSyncInfoUpdate_ = function onSyncInfoUpdate_() {
29837 var audioSeekable; // TODO check for creation of both source buffers before updating seekable
29838 //
29839 // A fix was made to this function where a check for
29840 // this.sourceUpdater_.hasCreatedSourceBuffers
29841 // was added to ensure that both source buffers were created before seekable was
29842 // updated. However, it originally had a bug where it returned early on a true value
29843 // instead of on a false one. Changing it to return early on false, though, created
29844 // other issues: a call to play() would check for a seekable end without verifying
29845 // that a seekable range was present. In addition, even checking for that didn't solve
29846 // some issues, as handleFirstPlay is sometimes bypassed when a media update calls
29847 // load on the segment loaders, skipping the seek to live and thereby starting live
29848 // streams at the beginning of the stream rather than at the end.
29849 //
29850 // This conditional should be fixed to wait for the creation of two source buffers at
29851 // the same time as the other sections of code are fixed to properly seek to live and
29852 // not throw an error due to checking for a seekable end when no seekable range exists.
29853 //
29854 // For now, fall back to the older behavior, with the understanding that the seekable
29855 // range may not be completely correct, leading to a suboptimal initial live point.
29856
29857 if (!this.masterPlaylistLoader_) {
29858 return;
29859 }
29860
29861 var media = this.masterPlaylistLoader_.media();
29862
29863 if (!media) {
29864 return;
29865 }
29866
29867 var expired = this.syncController_.getExpiredTime(media, this.duration());
29868
29869 if (expired === null) {
29870 // not enough information to update seekable
29871 return;
29872 }
29873
29874 var master = this.masterPlaylistLoader_.master;
29875 var mainSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(master, media));
29876
29877 if (mainSeekable.length === 0) {
29878 return;
29879 }
29880
29881 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
29882 media = this.mediaTypes_.AUDIO.activePlaylistLoader.media();
29883 expired = this.syncController_.getExpiredTime(media, this.duration());
29884
29885 if (expired === null) {
29886 return;
29887 }
29888
29889 audioSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(master, media));
29890
29891 if (audioSeekable.length === 0) {
29892 return;
29893 }
29894 }
29895
29896 var oldEnd;
29897 var oldStart;
29898
29899 if (this.seekable_ && this.seekable_.length) {
29900 oldEnd = this.seekable_.end(0);
29901 oldStart = this.seekable_.start(0);
29902 }
29903
29904 if (!audioSeekable) {
29905 // seekable has been calculated based on buffering video data so it
29906 // can be returned directly
29907 this.seekable_ = mainSeekable;
29908 } else if (audioSeekable.start(0) > mainSeekable.end(0) || mainSeekable.start(0) > audioSeekable.end(0)) {
29909 // seekables are pretty far off, rely on main
29910 this.seekable_ = mainSeekable;
29911 } else {
29912 this.seekable_ = videojs__default["default"].createTimeRanges([[audioSeekable.start(0) > mainSeekable.start(0) ? audioSeekable.start(0) : mainSeekable.start(0), audioSeekable.end(0) < mainSeekable.end(0) ? audioSeekable.end(0) : mainSeekable.end(0)]]);
29913 } // seekable is the same as last time
29914
29915
29916 if (this.seekable_ && this.seekable_.length) {
29917 if (this.seekable_.end(0) === oldEnd && this.seekable_.start(0) === oldStart) {
29918 return;
29919 }
29920 }
29921
29922 this.logger_("seekable updated [" + printableRange(this.seekable_) + "]");
29923 this.tech_.trigger('seekablechanged');
29924 }
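/*
 * Worked example of the merge above (numbers are made up): with a main seekable
 * range of [10, 60] and an audio seekable range of [12, 58], the ranges overlap,
 * so the reported range is their intersection:
 *
 *   videojs.createTimeRanges([[Math.max(10, 12), Math.min(60, 58)]]); // => [12, 58]
 *
 * Had audio started after main ended (e.g. [70, 90] vs. [10, 60]), the ranges are
 * "pretty far off" and main seekable would be used as-is.
 */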
29925 /**
29926 * Update the player duration
29927 */
29928 ;
29929
29930 _proto.updateDuration = function updateDuration(isLive) {
29931 if (this.updateDuration_) {
29932 this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
29933 this.updateDuration_ = null;
29934 }
29935
29936 if (this.mediaSource.readyState !== 'open') {
29937 this.updateDuration_ = this.updateDuration.bind(this, isLive);
29938 this.mediaSource.addEventListener('sourceopen', this.updateDuration_);
29939 return;
29940 }
29941
29942 if (isLive) {
29943 var seekable = this.seekable();
29944
29945 if (!seekable.length) {
29946 return;
29947 } // Even in the case of a live playlist, the native MediaSource's duration should not
29948 // be set to Infinity (even though this would be expected for a live playlist), since
29949 // setting the native MediaSource's duration to infinity ends up with consequences to
29950 // seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.
29951 //
29952 // This is resolved in the spec by https://github.com/w3c/media-source/pull/92,
29953 // however, few browsers have support for setLiveSeekableRange()
29954 // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
29955 //
29956 // Until a time when the duration of the media source can be set to infinity, and a
29957 // seekable range specified across browsers, the duration should be greater than or
29958 // equal to the last possible seekable value.
29959 // MediaSource duration starts as NaN
29960 // It is possible (and probable) that this case will never be reached for many
29961 // sources, since the MediaSource reports duration as the highest value without
29962 // accounting for timestamp offset. For example, if the timestamp offset is -100 and
29963 // we buffered times 0 to 100 with real times of 100 to 200, even though current
29964 // time will be between 0 and 100, the native media source may report the duration
29965 // as 200. However, since we report duration separate from the media source (as
29966 // Infinity), and as long as the native media source duration value is greater than
29967 // our reported seekable range, seeks will work as expected. The large number as
29968 // duration for live is actually a strategy used by some players to work around the
29969 // issue of live seekable ranges cited above.
29970
29971
29972 if (isNaN(this.mediaSource.duration) || this.mediaSource.duration < seekable.end(seekable.length - 1)) {
29973 this.sourceUpdater_.setDuration(seekable.end(seekable.length - 1));
29974 }
29975
29976 return;
29977 }
29978
29979 var buffered = this.tech_.buffered();
29980 var duration = Vhs$1.Playlist.duration(this.masterPlaylistLoader_.media());
29981
29982 if (buffered.length > 0) {
29983 duration = Math.max(duration, buffered.end(buffered.length - 1));
29984 }
29985
29986 if (this.mediaSource.duration !== duration) {
29987 this.sourceUpdater_.setDuration(duration);
29988 }
29989 }
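/*
 * Worked example of the live branch above (hypothetical values): with seekable
 * [0, 120] and a MediaSource whose duration is still NaN, the duration is bumped
 * to the seekable end so seeks near the live edge do not fail:
 *
 *   isNaN(mediaSource.duration);      // true right after the source opens
 *   sourceUpdater_.setDuration(120);  // duration becomes >= seekable.end(0)
 */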
29990 /**
29991 * dispose of the MasterPlaylistController and everything
29992 * that it controls
29993 */
29994 ;
29995
29996 _proto.dispose = function dispose() {
29997 var _this8 = this;
29998
29999 this.trigger('dispose');
30000 this.decrypter_.terminate();
30001 this.masterPlaylistLoader_.dispose();
30002 this.mainSegmentLoader_.dispose();
30003
30004 if (this.loadOnPlay_) {
30005 this.tech_.off('play', this.loadOnPlay_);
30006 }
30007
30008 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
30009 var groups = _this8.mediaTypes_[type].groups;
30010
30011 for (var id in groups) {
30012 groups[id].forEach(function (group) {
30013 if (group.playlistLoader) {
30014 group.playlistLoader.dispose();
30015 }
30016 });
30017 }
30018 });
30019 this.audioSegmentLoader_.dispose();
30020 this.subtitleSegmentLoader_.dispose();
30021 this.sourceUpdater_.dispose();
30022 this.timelineChangeController_.dispose();
30023 this.stopABRTimer_();
30024
30025 if (this.updateDuration_) {
30026 this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
30027 }
30028
30029 this.mediaSource.removeEventListener('durationchange', this.handleDurationChange_); // load the media source into the player
30030
30031 this.mediaSource.removeEventListener('sourceopen', this.handleSourceOpen_);
30032 this.mediaSource.removeEventListener('sourceended', this.handleSourceEnded_);
30033 this.off();
30034 }
30035 /**
30036 * return the master playlist object if we have one
30037 *
30038 * @return {Object} the master playlist object that we parsed
30039 */
30040 ;
30041
30042 _proto.master = function master() {
30043 return this.masterPlaylistLoader_.master;
30044 }
30045 /**
30046 * return the currently selected playlist
30047 *
30048 * @return {Object} the currently selected playlist object that we parsed
30049 */
30050 ;
30051
30052 _proto.media = function media() {
30053 // playlist loader will not return media if it has not been fully loaded
30054 return this.masterPlaylistLoader_.media() || this.initialMedia_;
30055 };
30056
30057 _proto.areMediaTypesKnown_ = function areMediaTypesKnown_() {
30058 var usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader;
30059 var hasMainMediaInfo = !!this.mainSegmentLoader_.getCurrentMediaInfo_(); // if we are not using an audio loader, then we already have the audio media info;
30060 // otherwise check on the audio segment loader.
30061
30062 var hasAudioMediaInfo = !usingAudioLoader ? true : !!this.audioSegmentLoader_.getCurrentMediaInfo_(); // one or both loaders have not loaded sufficiently to get codecs
30063
30064 if (!hasMainMediaInfo || !hasAudioMediaInfo) {
30065 return false;
30066 }
30067
30068 return true;
30069 };
30070
30071 _proto.getCodecsOrExclude_ = function getCodecsOrExclude_() {
30072 var _this9 = this;
30073
30074 var media = {
30075 main: this.mainSegmentLoader_.getCurrentMediaInfo_() || {},
30076 audio: this.audioSegmentLoader_.getCurrentMediaInfo_() || {}
30077 };
30078 var playlist = this.mainSegmentLoader_.getPendingSegmentPlaylist() || this.media(); // set "main" media equal to video
30079
30080 media.video = media.main;
30081 var playlistCodecs = codecsForPlaylist(this.master(), playlist);
30082 var codecs = {};
30083 var usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader;
30084
30085 if (media.main.hasVideo) {
30086 codecs.video = playlistCodecs.video || media.main.videoCodec || DEFAULT_VIDEO_CODEC;
30087 }
30088
30089 if (media.main.isMuxed) {
30090 codecs.video += "," + (playlistCodecs.audio || media.main.audioCodec || DEFAULT_AUDIO_CODEC);
30091 }
30092
30093 if (media.main.hasAudio && !media.main.isMuxed || media.audio.hasAudio || usingAudioLoader) {
30094 codecs.audio = playlistCodecs.audio || media.main.audioCodec || media.audio.audioCodec || DEFAULT_AUDIO_CODEC; // set audio isFmp4 so we use the correct "supports" function below
30095
30096 media.audio.isFmp4 = media.main.hasAudio && !media.main.isMuxed ? media.main.isFmp4 : media.audio.isFmp4;
30097 } // no codecs, no playback.
30098
30099
30100 if (!codecs.audio && !codecs.video) {
30101 this.blacklistCurrentPlaylist({
30102 playlist: playlist,
30103 message: 'Could not determine codecs for playlist.',
30104 blacklistDuration: Infinity
30105 });
30106 return;
30107 } // fmp4 relies on browser support, while ts relies on muxer support
30108
30109
30110 var supportFunction = function supportFunction(isFmp4, codec) {
30111 return isFmp4 ? browserSupportsCodec(codec) : muxerSupportsCodec(codec);
30112 };
30113
30114 var unsupportedCodecs = {};
30115 var unsupportedAudio;
30116 ['video', 'audio'].forEach(function (type) {
30117 if (codecs.hasOwnProperty(type) && !supportFunction(media[type].isFmp4, codecs[type])) {
30118 var supporter = media[type].isFmp4 ? 'browser' : 'muxer';
30119 unsupportedCodecs[supporter] = unsupportedCodecs[supporter] || [];
30120 unsupportedCodecs[supporter].push(codecs[type]);
30121
30122 if (type === 'audio') {
30123 unsupportedAudio = supporter;
30124 }
30125 }
30126 });
30127
30128 if (usingAudioLoader && unsupportedAudio && playlist.attributes.AUDIO) {
30129 var audioGroup = playlist.attributes.AUDIO;
30130 this.master().playlists.forEach(function (variant) {
30131 var variantAudioGroup = variant.attributes && variant.attributes.AUDIO;
30132
30133 if (variantAudioGroup === audioGroup && variant !== playlist) {
30134 variant.excludeUntil = Infinity;
30135 }
30136 });
30137 this.logger_("excluding audio group " + audioGroup + " as " + unsupportedAudio + " does not support codec(s): \"" + codecs.audio + "\"");
30138 } // if we have any unsupported codecs blacklist this playlist.
30139
30140
30141 if (Object.keys(unsupportedCodecs).length) {
30142 var message = Object.keys(unsupportedCodecs).reduce(function (acc, supporter) {
30143 if (acc) {
30144 acc += ', ';
30145 }
30146
30147 acc += supporter + " does not support codec(s): \"" + unsupportedCodecs[supporter].join(',') + "\"";
30148 return acc;
30149 }, '') + '.';
30150 this.blacklistCurrentPlaylist({
30151 playlist: playlist,
30152 internal: true,
30153 message: message,
30154 blacklistDuration: Infinity
30155 });
30156 return;
30157 } // check if codec switching is happening
30158
30159
30160 if (this.sourceUpdater_.hasCreatedSourceBuffers() && !this.sourceUpdater_.canChangeType()) {
30161 var switchMessages = [];
30162 ['video', 'audio'].forEach(function (type) {
30163 var newCodec = (parseCodecs(_this9.sourceUpdater_.codecs[type] || '')[0] || {}).type;
30164 var oldCodec = (parseCodecs(codecs[type] || '')[0] || {}).type;
30165
30166 if (newCodec && oldCodec && newCodec.toLowerCase() !== oldCodec.toLowerCase()) {
30167 switchMessages.push("\"" + _this9.sourceUpdater_.codecs[type] + "\" -> \"" + codecs[type] + "\"");
30168 }
30169 });
30170
30171 if (switchMessages.length) {
30172 this.blacklistCurrentPlaylist({
30173 playlist: playlist,
30174 message: "Codec switching not supported: " + switchMessages.join(', ') + ".",
30175 blacklistDuration: Infinity,
30176 internal: true
30177 });
30178 return;
30179 }
30180 } // TODO: when using the muxer shouldn't we just return
30181 // the codecs that the muxer outputs?
30182
30183
30184 return codecs;
30185 }
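/*
 * Shape of a successful return value (illustrative; real strings depend on the
 * playlist attributes and probed media):
 *
 *   { video: 'avc1.4d400d', audio: 'mp4a.40.2' }
 *
 * For muxed content the audio codec is instead folded into codecs.video, e.g.
 * 'avc1.4d400d,mp4a.40.2', and codecs.audio is set when demuxed audio is present
 * or an audio loader is in use.
 */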
30186 /**
30187 * Create source buffers and exclude any incompatible renditions.
30188 *
30189 * @private
30190 */
30191 ;
30192
30193 _proto.tryToCreateSourceBuffers_ = function tryToCreateSourceBuffers_() {
30194 // media source is not ready yet or sourceBuffers are already
30195 // created.
30196 if (this.mediaSource.readyState !== 'open' || this.sourceUpdater_.hasCreatedSourceBuffers()) {
30197 return;
30198 }
30199
30200 if (!this.areMediaTypesKnown_()) {
30201 return;
30202 }
30203
30204 var codecs = this.getCodecsOrExclude_(); // no codecs means that the playlist was excluded
30205
30206 if (!codecs) {
30207 return;
30208 }
30209
30210 this.sourceUpdater_.createSourceBuffers(codecs);
30211 var codecString = [codecs.video, codecs.audio].filter(Boolean).join(',');
30212 this.excludeIncompatibleVariants_(codecString);
30213 }
30214 /**
30215 * Excludes playlists with codecs that are unsupported by the muxer and browser.
30216 */
30217 ;
30218
30219 _proto.excludeUnsupportedVariants_ = function excludeUnsupportedVariants_() {
30220 var _this10 = this;
30221
30222 var playlists = this.master().playlists;
30223 var ids = []; // TODO: why don't we have a property to loop through all
30224 // playlists? Why did we ever mix indexes and keys?
30225
30226 Object.keys(playlists).forEach(function (key) {
30227 var variant = playlists[key]; // check if we already processed this playlist.
30228
30229 if (ids.indexOf(variant.id) !== -1) {
30230 return;
30231 }
30232
30233 ids.push(variant.id);
30234 var codecs = codecsForPlaylist(_this10.master(), variant);
30235 var unsupported = [];
30236
30237 if (codecs.audio && !muxerSupportsCodec(codecs.audio) && !browserSupportsCodec(codecs.audio)) {
30238 unsupported.push("audio codec " + codecs.audio);
30239 }
30240
30241 if (codecs.video && !muxerSupportsCodec(codecs.video) && !browserSupportsCodec(codecs.video)) {
30242 unsupported.push("video codec " + codecs.video);
30243 }
30244
30245 if (codecs.text && codecs.text === 'stpp.ttml.im1t') {
30246 unsupported.push("text codec " + codecs.text);
30247 }
30248
30249 if (unsupported.length) {
30250 variant.excludeUntil = Infinity;
30251
30252 _this10.logger_("excluding " + variant.id + " for unsupported: " + unsupported.join(', '));
30253 }
30254 });
30255 }
30256 /**
30257 * Blacklist playlists that are known to be codec or
30258 * stream-incompatible with the SourceBuffer configuration. For
30259 * instance, Media Source Extensions would cause the video element to
30260 * stall waiting for video data if you switched from a variant with
30261 * video and audio to an audio-only one.
30262 *
30263 * @param {Object} media a media playlist compatible with the current
30264 * set of SourceBuffers. Variants in the current master playlist that
30265 * do not appear to have compatible codec or stream configurations
30266 * will be excluded from the default playlist selection algorithm
30267 * indefinitely.
30268 * @private
30269 */
30270 ;
30271
30272 _proto.excludeIncompatibleVariants_ = function excludeIncompatibleVariants_(codecString) {
30273 var _this11 = this;
30274
30275 var ids = [];
30276 var playlists = this.master().playlists;
30277 var codecs = unwrapCodecList(parseCodecs(codecString));
30278 var codecCount_ = codecCount(codecs);
30279 var videoDetails = codecs.video && parseCodecs(codecs.video)[0] || null;
30280 var audioDetails = codecs.audio && parseCodecs(codecs.audio)[0] || null;
30281 Object.keys(playlists).forEach(function (key) {
30282 var variant = playlists[key]; // check if we already processed this playlist,
30283 // or if it is already excluded forever.
30284
30285 if (ids.indexOf(variant.id) !== -1 || variant.excludeUntil === Infinity) {
30286 return;
30287 }
30288
30289 ids.push(variant.id);
30290 var blacklistReasons = []; // get codecs from the playlist for this variant
30291
30292 var variantCodecs = codecsForPlaylist(_this11.masterPlaylistLoader_.master, variant);
30293 var variantCodecCount = codecCount(variantCodecs); // if no codecs are listed, we cannot determine that this
30294 // variant is incompatible. Wait for mux.js to probe
30295
30296 if (!variantCodecs.audio && !variantCodecs.video) {
30297 return;
30298 } // TODO: we can support this by removing the
30299 // old media source and creating a new one, but it will take some work.
30300 // The number of streams cannot change
30301
30302
30303 if (variantCodecCount !== codecCount_) {
30304 blacklistReasons.push("codec count \"" + variantCodecCount + "\" !== \"" + codecCount_ + "\"");
30305 } // only exclude playlists on codec change if codecs cannot switch
30306 // during playback.
30307
30308
30309 if (!_this11.sourceUpdater_.canChangeType()) {
30310 var variantVideoDetails = variantCodecs.video && parseCodecs(variantCodecs.video)[0] || null;
30311 var variantAudioDetails = variantCodecs.audio && parseCodecs(variantCodecs.audio)[0] || null; // the video codec cannot change
30312
30313 if (variantVideoDetails && videoDetails && variantVideoDetails.type.toLowerCase() !== videoDetails.type.toLowerCase()) {
30314 blacklistReasons.push("video codec \"" + variantVideoDetails.type + "\" !== \"" + videoDetails.type + "\"");
30315 } // the audio codec cannot change
30316
30317
30318 if (variantAudioDetails && audioDetails && variantAudioDetails.type.toLowerCase() !== audioDetails.type.toLowerCase()) {
30319 blacklistReasons.push("audio codec \"" + variantAudioDetails.type + "\" !== \"" + audioDetails.type + "\"");
30320 }
30321 }
30322
30323 if (blacklistReasons.length) {
30324 variant.excludeUntil = Infinity;
30325
30326 _this11.logger_("blacklisting " + variant.id + ": " + blacklistReasons.join(' && '));
30327 }
30328 });
30329 };
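/*
 * Worked example (hypothetical variants): if source buffers were created for
 * 'avc1.4d400d,mp4a.40.2' (codec count 2), then:
 *
 *   - an audio-only variant (codec count 1) is excluded with
 *     'codec count "1" !== "2"'
 *   - when changeType() is unavailable, an hvc1 variant is excluded with
 *     'video codec "hvc1" !== "avc1"'
 */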
30330
30331 _proto.updateAdCues_ = function updateAdCues_(media) {
30332 var offset = 0;
30333 var seekable = this.seekable();
30334
30335 if (seekable.length) {
30336 offset = seekable.start(0);
30337 }
30338
30339 updateAdCues(media, this.cueTagsTrack_, offset);
30340 }
30341 /**
30342 * Calculates the desired forward buffer length based on current time
30343 *
30344 * @return {number} Desired forward buffer length in seconds
30345 */
30346 ;
30347
30348 _proto.goalBufferLength = function goalBufferLength() {
30349 var currentTime = this.tech_.currentTime();
30350 var initial = Config.GOAL_BUFFER_LENGTH;
30351 var rate = Config.GOAL_BUFFER_LENGTH_RATE;
30352 var max = Math.max(initial, Config.MAX_GOAL_BUFFER_LENGTH);
30353 return Math.min(initial + currentTime * rate, max);
30354 }
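/*
 * Worked example, assuming the Config defaults shipped with this version
 * (GOAL_BUFFER_LENGTH: 30, GOAL_BUFFER_LENGTH_RATE: 1, MAX_GOAL_BUFFER_LENGTH: 60):
 *
 *   currentTime = 10 => Math.min(30 + 10 * 1, 60) === 40 seconds
 *   currentTime = 45 => Math.min(30 + 45 * 1, 60) === 60 seconds (capped)
 */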
30355 /**
30356 * Calculates the desired buffer low water line based on current time
30357 *
30358 * @return {number} Desired buffer low water line in seconds
30359 */
30360 ;
30361
30362 _proto.bufferLowWaterLine = function bufferLowWaterLine() {
30363 var currentTime = this.tech_.currentTime();
30364 var initial = Config.BUFFER_LOW_WATER_LINE;
30365 var rate = Config.BUFFER_LOW_WATER_LINE_RATE;
30366 var max = Math.max(initial, Config.MAX_BUFFER_LOW_WATER_LINE);
30367 var newMax = Math.max(initial, Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE);
30368 return Math.min(initial + currentTime * rate, this.experimentalBufferBasedABR ? newMax : max);
30369 };
30370
30371 _proto.bufferHighWaterLine = function bufferHighWaterLine() {
30372 return Config.BUFFER_HIGH_WATER_LINE;
30373 };
30374
30375 return MasterPlaylistController;
30376 }(videojs__default["default"].EventTarget);
30377
30378 /**
30379 * Returns a function that acts as the Enable/disable playlist function.
30380 *
30381 * @param {PlaylistLoader} loader - The master playlist loader
30382 * @param {string} playlistID - id of the playlist
30383 * @param {Function} changePlaylistFn - A function to be called after a
30384 * playlist's enabled-state has been changed. Will NOT be called if a
30385 * playlist's enabled-state is unchanged
30386 * @param {boolean=} enable - Value to set the playlist enabled-state to
30387 * or if undefined returns the current enabled-state for the playlist
30388 * @return {Function} Function for setting/getting enabled
30389 */
30390
30391 var enableFunction = function enableFunction(loader, playlistID, changePlaylistFn) {
30392 return function (enable) {
30393 var playlist = loader.master.playlists[playlistID];
30394 var incompatible = isIncompatible(playlist);
30395 var currentlyEnabled = isEnabled(playlist);
30396
30397 if (typeof enable === 'undefined') {
30398 return currentlyEnabled;
30399 }
30400
30401 if (enable) {
30402 delete playlist.disabled;
30403 } else {
30404 playlist.disabled = true;
30405 }
30406
30407 if (enable !== currentlyEnabled && !incompatible) {
30408 // Ensure the outside world knows about our changes
30409 changePlaylistFn();
30410
30411 if (enable) {
30412 loader.trigger('renditionenabled');
30413 } else {
30414 loader.trigger('renditiondisabled');
30415 }
30416 }
30417
30418 return enable;
30419 };
30420 };
30421 /**
30422 * The representation object encapsulates the publicly visible information
30423 * in a media playlist along with a setter/getter-type function (enabled)
30424 * for changing the enabled-state of a particular playlist entry
30425 *
30426 * @class Representation
30427 */
30428
30429
30430 var Representation = function Representation(vhsHandler, playlist, id) {
30431 var mpc = vhsHandler.masterPlaylistController_,
30432 smoothQualityChange = vhsHandler.options_.smoothQualityChange; // Get a reference to a bound version of the quality change function
30433
30434 var changeType = smoothQualityChange ? 'smooth' : 'fast';
30435 var qualityChangeFunction = mpc[changeType + "QualityChange_"].bind(mpc); // some playlist attributes are optional
30436
30437 if (playlist.attributes) {
30438 var resolution = playlist.attributes.RESOLUTION;
30439 this.width = resolution && resolution.width;
30440 this.height = resolution && resolution.height;
30441 this.bandwidth = playlist.attributes.BANDWIDTH;
30442 this.frameRate = playlist.attributes['FRAME-RATE'];
30443 }
30444
30445 this.codecs = codecsForPlaylist(mpc.master(), playlist);
30446 this.playlist = playlist; // The id is simply the ordinality of the media playlist
30447 // within the master playlist
30448
30449 this.id = id; // Partially-apply the enableFunction to create a playlist-
30450 // specific variant
30451
30452 this.enabled = enableFunction(vhsHandler.playlists, playlist.id, qualityChangeFunction);
30453 };
30454 /**
30455 * A mixin function that adds the `representations` api to an instance
30456 * of the VhsHandler class
30457 *
30458 * @param {VhsHandler} vhsHandler - An instance of VhsHandler to add the
30459 * representation API into
30460 */
30461
30462
30463 var renditionSelectionMixin = function renditionSelectionMixin(vhsHandler) {
30464 // Add a single API-specific function to the VhsHandler instance
30465 vhsHandler.representations = function () {
30466 var master = vhsHandler.masterPlaylistController_.master();
30467 var playlists = isAudioOnly(master) ? vhsHandler.masterPlaylistController_.getAudioTrackPlaylists_() : master.playlists;
30468
30469 if (!playlists) {
30470 return [];
30471 }
30472
30473 return playlists.filter(function (media) {
30474 return !isIncompatible(media);
30475 }).map(function (e, i) {
30476 return new Representation(vhsHandler, e, e.id);
30477 });
30478 };
30479 };
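/*
 * Usage sketch for the representations API added above. Assumes the VHS handler
 * is exposed on the tech as `vhs`; the element id and the 720p cutoff are made up.
 *
 *   var player = videojs('example-player');
 *   var vhs = player.tech({ IWillNotUseThisInPlugins: true }).vhs;
 *
 *   vhs.representations().forEach(function (rep) {
 *     // enable only renditions at or below 720p (height may be undefined
 *     // when the playlist has no RESOLUTION attribute)
 *     rep.enabled(typeof rep.height === 'number' && rep.height <= 720);
 *   });
 */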
30480
30481 /**
30482 * @file playback-watcher.js
30483 *
30484 * Playback starts, and now my watch begins. It shall not end until my death. I shall
30485 * take no wait, hold no uncleared timeouts, father no bad seeks. I shall wear no crowns
30486 * and win no glory. I shall live and die at my post. I am the corrector of the underflow.
30487 * I am the watcher of gaps. I am the shield that guards the realms of seekable. I pledge
30488 * my life and honor to the Playback Watch, for this Player and all the Players to come.
30489 */
30490
30491 var timerCancelEvents = ['seeking', 'seeked', 'pause', 'playing', 'error'];
30492 /**
30493 * @class PlaybackWatcher
30494 */
30495
30496 var PlaybackWatcher = /*#__PURE__*/function () {
30497 /**
30498 * Represents a PlaybackWatcher object.
30499 *
30500 * @class
30501 * @param {Object} options an object that includes the tech and settings
30502 */
30503 function PlaybackWatcher(options) {
30504 var _this = this;
30505
30506 this.masterPlaylistController_ = options.masterPlaylistController;
30507 this.tech_ = options.tech;
30508 this.seekable = options.seekable;
30509 this.allowSeeksWithinUnsafeLiveWindow = options.allowSeeksWithinUnsafeLiveWindow;
30510 this.liveRangeSafeTimeDelta = options.liveRangeSafeTimeDelta;
30511 this.media = options.media;
30512 this.consecutiveUpdates = 0;
30513 this.lastRecordedTime = null;
30514 this.timer_ = null;
30515 this.checkCurrentTimeTimeout_ = null;
30516 this.logger_ = logger('PlaybackWatcher');
30517 this.logger_('initialize');
30518
30519 var playHandler = function playHandler() {
30520 return _this.monitorCurrentTime_();
30521 };
30522
30523 var canPlayHandler = function canPlayHandler() {
30524 return _this.monitorCurrentTime_();
30525 };
30526
30527 var waitingHandler = function waitingHandler() {
30528 return _this.techWaiting_();
30529 };
30530
30531 var cancelTimerHandler = function cancelTimerHandler() {
30532 return _this.cancelTimer_();
30533 };
30534
30535 var mpc = this.masterPlaylistController_;
30536 var loaderTypes = ['main', 'subtitle', 'audio'];
30537 var loaderChecks = {};
30538 loaderTypes.forEach(function (type) {
30539 loaderChecks[type] = {
30540 reset: function reset() {
30541 return _this.resetSegmentDownloads_(type);
30542 },
30543 updateend: function updateend() {
30544 return _this.checkSegmentDownloads_(type);
30545 }
30546 };
30547 mpc[type + "SegmentLoader_"].on('appendsdone', loaderChecks[type].updateend); // If a rendition switch happens during a playback stall where the buffer
30548 // isn't changing, we want to reset. We cannot assume that the new rendition
30549 // will also be stalled until we have seen new appends.
30550
30551 mpc[type + "SegmentLoader_"].on('playlistupdate', loaderChecks[type].reset); // Playback stalls should not be detected right after seeking.
30552 // This prevents one-segment playlists (single vtt or single-segment content)
30553 // from being detected as stalling, since in those cases the buffer covers the
30554 // entire video duration and will not change.
30555
30556 _this.tech_.on(['seeked', 'seeking'], loaderChecks[type].reset);
30557 });
30558 /**
30559 * We check if a seek was into a gap through the following steps:
30560 * 1. We get a seeking event and we do not get a seeked event. This means that
30561 * a seek was attempted but not completed.
30562 * 2. We run `fixesBadSeeks_` on segment loader appends. This means that we already
30563 * removed everything from our buffer and appended a segment, and should be ready
30564 * to check for gaps.
30565 */
30566
30567 var setSeekingHandlers = function setSeekingHandlers(fn) {
30568 ['main', 'audio'].forEach(function (type) {
30569 mpc[type + "SegmentLoader_"][fn]('appended', _this.seekingAppendCheck_);
30570 });
30571 };
30572
30573 this.seekingAppendCheck_ = function () {
30574 if (_this.fixesBadSeeks_()) {
30575 _this.consecutiveUpdates = 0;
30576 _this.lastRecordedTime = _this.tech_.currentTime();
30577 setSeekingHandlers('off');
30578 }
30579 };
30580
30581 this.clearSeekingAppendCheck_ = function () {
30582 return setSeekingHandlers('off');
30583 };
30584
30585 this.watchForBadSeeking_ = function () {
30586 _this.clearSeekingAppendCheck_();
30587
30588 setSeekingHandlers('on');
30589 };
30590
30591 this.tech_.on('seeked', this.clearSeekingAppendCheck_);
30592 this.tech_.on('seeking', this.watchForBadSeeking_);
30593 this.tech_.on('waiting', waitingHandler);
30594 this.tech_.on(timerCancelEvents, cancelTimerHandler);
30595 this.tech_.on('canplay', canPlayHandler);
30596 /*
30597 An edge case exists that results in gaps not being skipped when they exist at the beginning of a stream. This case
30598 is surfaced in one of two ways:
30599 1) The `waiting` event is fired before the player has buffered content, making it impossible
30600 to find or skip the gap. The `waiting` event is followed by a `play` event. On first play
30601 we can check if playback is stalled due to a gap, and skip the gap if necessary.
30602 2) A source with a gap at the beginning of the stream is loaded programmatically while the player
30603 is in a playing state. To catch this case, it's important that our one-time play listener is set up
30604 even if the player is already in a playing state.
30605 */
30606
30607 this.tech_.one('play', playHandler); // Define the dispose function to clean up our events
30608
30609 this.dispose = function () {
30610 _this.clearSeekingAppendCheck_();
30611
30612 _this.logger_('dispose');
30613
30614 _this.tech_.off('waiting', waitingHandler);
30615
30616 _this.tech_.off(timerCancelEvents, cancelTimerHandler);
30617
30618 _this.tech_.off('canplay', canPlayHandler);
30619
30620 _this.tech_.off('play', playHandler);
30621
30622 _this.tech_.off('seeking', _this.watchForBadSeeking_);
30623
30624 _this.tech_.off('seeked', _this.clearSeekingAppendCheck_);
30625
30626 loaderTypes.forEach(function (type) {
30627 mpc[type + "SegmentLoader_"].off('appendsdone', loaderChecks[type].updateend);
30628 mpc[type + "SegmentLoader_"].off('playlistupdate', loaderChecks[type].reset);
30629
30630 _this.tech_.off(['seeked', 'seeking'], loaderChecks[type].reset);
30631 });
30632
30633 if (_this.checkCurrentTimeTimeout_) {
30634 window.clearTimeout(_this.checkCurrentTimeTimeout_);
30635 }
30636
30637 _this.cancelTimer_();
30638 };
30639 }
30640 /**
30641 * Periodically check current time to see if playback stopped
30642 *
30643 * @private
30644 */
30645
30646
30647 var _proto = PlaybackWatcher.prototype;
30648
30649 _proto.monitorCurrentTime_ = function monitorCurrentTime_() {
30650 this.checkCurrentTime_();
30651
30652 if (this.checkCurrentTimeTimeout_) {
30653 window.clearTimeout(this.checkCurrentTimeTimeout_);
30654 } // polling interval candidates: 42ms is ~24fps; 250ms is what WebKit uses; Firefox uses 15ms
30655
30656
30657 this.checkCurrentTimeTimeout_ = window.setTimeout(this.monitorCurrentTime_.bind(this), 250);
30658 }
30659 /**
30660 * Reset stalled download stats for a specific type of loader
30661 *
30662 * @param {string} type
30663 * The segment loader type to check.
30664 *
30665 * @listens SegmentLoader#playlistupdate
30666 * @listens Tech#seeking
30667 * @listens Tech#seeked
30668 */
30669 ;
30670
30671 _proto.resetSegmentDownloads_ = function resetSegmentDownloads_(type) {
30672 var loader = this.masterPlaylistController_[type + "SegmentLoader_"];
30673
30674 if (this[type + "StalledDownloads_"] > 0) {
30675 this.logger_("resetting possible stalled download count for " + type + " loader");
30676 }
30677
30678 this[type + "StalledDownloads_"] = 0;
30679 this[type + "Buffered_"] = loader.buffered_();
30680 }
30681 /**
30682 * Checks on every segment `appendsdone` to see
30683 * if segment appends are making progress. If they are not,
30684 * and we are still downloading bytes, we blacklist the playlist.
30685 *
30686 * @param {string} type
30687 * The segment loader type to check.
30688 *
30689 * @listens SegmentLoader#appendsdone
30690 */
30691 ;
30692
30693 _proto.checkSegmentDownloads_ = function checkSegmentDownloads_(type) {
30694 var mpc = this.masterPlaylistController_;
30695 var loader = mpc[type + "SegmentLoader_"];
30696 var buffered = loader.buffered_();
30697 var isBufferedDifferent = isRangeDifferent(this[type + "Buffered_"], buffered);
30698 this[type + "Buffered_"] = buffered; // if another watcher is going to fix the issue or
30699 // the buffered value for this loader changed
30700 // appends are working
30701
30702 if (isBufferedDifferent) {
30703 this.resetSegmentDownloads_(type);
30704 return;
30705 }
30706
30707 this[type + "StalledDownloads_"]++;
30708 this.logger_("found #" + this[type + "StalledDownloads_"] + " " + type + " appends that did not increase buffer (possible stalled download)", {
30709 playlistId: loader.playlist_ && loader.playlist_.id,
30710 buffered: timeRangesToArray(buffered)
30711 }); // after 10 possibly stalled appends with no reset, exclude
30712
30713 if (this[type + "StalledDownloads_"] < 10) {
30714 return;
30715 }
30716
30717 this.logger_(type + " loader stalled download exclusion");
30718 this.resetSegmentDownloads_(type);
30719 this.tech_.trigger({
30720 type: 'usage',
30721 name: "vhs-" + type + "-download-exclusion"
30722 });
30723
30724 if (type === 'subtitle') {
30725 return;
30726 } // TODO: should we exclude audio tracks rather than main tracks
30727 // when type is audio?
30728
30729
30730 mpc.blacklistCurrentPlaylist({
30731 message: "Excessive " + type + " segment downloading detected."
30732 }, Infinity);
30733 }
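/*
 * Timeline sketch of the check above (hypothetical): ten consecutive 'appendsdone'
 * events for the main loader without a change in its buffered ranges lead to:
 *
 *   mpc.blacklistCurrentPlaylist({
 *     message: 'Excessive main segment downloading detected.'
 *   }, Infinity);
 */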
30734 /**
30735 * The purpose of this function is to emulate the "waiting" event on
30736 * browsers that do not emit it when they are waiting for more
30737 * data to continue playback
30738 *
30739 * @private
30740 */
30741 ;
30742
30743 _proto.checkCurrentTime_ = function checkCurrentTime_() {
30744 if (this.tech_.paused() || this.tech_.seeking()) {
30745 return;
30746 }
30747
30748 var currentTime = this.tech_.currentTime();
30749 var buffered = this.tech_.buffered();
30750
30751 if (this.lastRecordedTime === currentTime && (!buffered.length || currentTime + SAFE_TIME_DELTA >= buffered.end(buffered.length - 1))) {
30752 // If current time is at the end of the final buffered region, then any playback
30753 // stall is most likely caused by buffering in a low bandwidth environment. The tech
30754 // should fire a `waiting` event in this scenario, but due to browser and tech
30755 // inconsistencies it may not. Calling `techWaiting_` here allows us to simulate
30756 // responding to a native `waiting` event when the tech fails to emit one.
30757 return this.techWaiting_();
30758 }
30759
30760 if (this.consecutiveUpdates >= 5 && currentTime === this.lastRecordedTime) {
30761 this.consecutiveUpdates++;
30762 this.waiting_();
30763 } else if (currentTime === this.lastRecordedTime) {
30764 this.consecutiveUpdates++;
30765 } else {
30766 this.consecutiveUpdates = 0;
30767 this.lastRecordedTime = currentTime;
30768 }
30769 }
30770 /**
30771 * Cancels any pending timers and resets the 'timeupdate' mechanism
30772 * designed to detect that we are stalled
30773 *
30774 * @private
30775 */
30776 ;
30777
30778 _proto.cancelTimer_ = function cancelTimer_() {
30779 this.consecutiveUpdates = 0;
30780
30781 if (this.timer_) {
30782 this.logger_('cancelTimer_');
30783 clearTimeout(this.timer_);
30784 }
30785
30786 this.timer_ = null;
30787 }
30788 /**
30789 * Fixes situations where there's a bad seek
30790 *
30791 * @return {boolean} whether an action was taken to fix the seek
30792 * @private
30793 */
30794 ;
30795
30796 _proto.fixesBadSeeks_ = function fixesBadSeeks_() {
30797 var seeking = this.tech_.seeking();
30798
30799 if (!seeking) {
30800 return false;
30801 } // TODO: It's possible that these seekable checks should be moved out of this function
30802 // and into a function that runs on seekablechange. It's also possible that we only need
30803 // afterSeekableWindow as the buffered check at the bottom is good enough to handle before
30804 // seekable range.
30805
30806
30807 var seekable = this.seekable();
30808 var currentTime = this.tech_.currentTime();
30809 var isAfterSeekableRange = this.afterSeekableWindow_(seekable, currentTime, this.media(), this.allowSeeksWithinUnsafeLiveWindow);
30810 var seekTo;
30811
30812 if (isAfterSeekableRange) {
30813 var seekableEnd = seekable.end(seekable.length - 1); // sync to live point (if VOD, our seekable was updated and we're simply adjusting)
30814
30815 seekTo = seekableEnd;
30816 }
30817
30818 if (this.beforeSeekableWindow_(seekable, currentTime)) {
30819 var seekableStart = seekable.start(0); // sync to the beginning of the live window
30820 // provide a buffer of .1 seconds to handle rounding/imprecise numbers
30821
30822 seekTo = seekableStart + ( // if the playlist is too short and the seekable range is an exact time (can
30823 // happen in live with a 3 segment playlist), then don't use a time delta
30824 seekableStart === seekable.end(0) ? 0 : SAFE_TIME_DELTA);
30825 }
30826
30827 if (typeof seekTo !== 'undefined') {
30828 this.logger_("Trying to seek outside of seekable at time " + currentTime + " with " + ("seekable range " + printableRange(seekable) + ". Seeking to ") + (seekTo + "."));
30829 this.tech_.setCurrentTime(seekTo);
30830 return true;
30831 }
30832
30833 var sourceUpdater = this.masterPlaylistController_.sourceUpdater_;
30834 var buffered = this.tech_.buffered();
30835 var audioBuffered = sourceUpdater.audioBuffer ? sourceUpdater.audioBuffered() : null;
30836 var videoBuffered = sourceUpdater.videoBuffer ? sourceUpdater.videoBuffered() : null;
30837 var media = this.media(); // verify that at least two segment durations or one part duration have been
30838 // appended before checking for a gap.
30839
30840 var minAppendedDuration = media.partTargetDuration ? media.partTargetDuration : (media.targetDuration - TIME_FUDGE_FACTOR) * 2;
30842
30843 var bufferedToCheck = [audioBuffered, videoBuffered];
30844
30845 for (var i = 0; i < bufferedToCheck.length; i++) {
30846 // skip null buffered
30847 if (!bufferedToCheck[i]) {
30848 continue;
30849 }
30850
30851 var timeAhead = timeAheadOf(bufferedToCheck[i], currentTime); // if we are less than two video/audio segment durations or one part
30852 // duration behind we haven't appended enough to call this a bad seek.
30853
30854 if (timeAhead < minAppendedDuration) {
30855 return false;
30856 }
30857 }
30858
30859 var nextRange = findNextRange(buffered, currentTime); // we have appended enough content, but we don't have anything buffered
30860 // to seek over the gap
30861
30862 if (nextRange.length === 0) {
30863 return false;
30864 }
30865
30866 seekTo = nextRange.start(0) + SAFE_TIME_DELTA;
30867 this.logger_("Buffered region starts (" + nextRange.start(0) + ") " + (" just beyond seek point (" + currentTime + "). Seeking to " + seekTo + "."));
30868 this.tech_.setCurrentTime(seekTo);
30869 return true;
30870 }
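/*
 * Worked example of the gap correction above (hypothetical buffer state): with
 * buffered ranges [0, 10] and [10.5, 20], a seek to 10.2 lands in the gap and
 * never completes. Once enough media has been appended, the watcher seeks just
 * past the start of the next buffered range:
 *
 *   seekTo = nextRange.start(0) + SAFE_TIME_DELTA; // 10.5 + SAFE_TIME_DELTA
 */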
30871 /**
30872 * Handler for situations when we determine the player is waiting.
30873 *
30874 * @private
30875 */
30876 ;
30877
30878 _proto.waiting_ = function waiting_() {
30879 if (this.techWaiting_()) {
30880 return;
30881 } // All tech waiting checks failed. Use last resort correction
30882
30883
30884 var currentTime = this.tech_.currentTime();
30885 var buffered = this.tech_.buffered();
30886 var currentRange = findRange(buffered, currentTime); // Sometimes the player can stall for unknown reasons within a contiguous buffered
30887 // region with no indication that anything is amiss (seen in Firefox). Seeking to
30888 // currentTime is usually enough to kickstart the player. This checks that the player
30889 // is currently within a buffered region before attempting a corrective seek.
30890 // Chrome does not appear to continue `timeupdate` events after a `waiting` event
30891 // until there is ~ 3 seconds of forward buffer available. PlaybackWatcher should also
30892 // make sure there is ~3 seconds of forward buffer before taking any corrective action
30893 // to avoid triggering an `unknownwaiting` event when the network is slow.
30894
30895 if (currentRange.length && currentTime + 3 <= currentRange.end(0)) {
30896 this.cancelTimer_();
30897 this.tech_.setCurrentTime(currentTime);
30898 this.logger_("Stopped at " + currentTime + " while inside a buffered region " + ("[" + currentRange.start(0) + " -> " + currentRange.end(0) + "]. Attempting to resume ") + 'playback by seeking to the current time.'); // unknown waiting corrections may be useful for monitoring QoS
30899
30900 this.tech_.trigger({
30901 type: 'usage',
30902 name: 'vhs-unknown-waiting'
30903 });
30904 this.tech_.trigger({
30905 type: 'usage',
30906 name: 'hls-unknown-waiting'
30907 });
30908 return;
30909 }
30910 }
30911 /**
30912 * Handler for situations when the tech fires a `waiting` event
30913 *
30914 * @return {boolean}
30915 * True if an action was taken (or none was needed) to correct the waiting. False if
30916 * no checks passed.
30917 * @private
30918 */
30919 ;
30920
30921 _proto.techWaiting_ = function techWaiting_() {
30922 var seekable = this.seekable();
30923 var currentTime = this.tech_.currentTime();
30924
30925 if (this.tech_.seeking() || this.timer_ !== null) {
30926 // Tech is seeking or already waiting on another action, no action needed
30927 return true;
30928 }
30929
30930 if (this.beforeSeekableWindow_(seekable, currentTime)) {
30931 var livePoint = seekable.end(seekable.length - 1);
30932 this.logger_("Fell out of live window at time " + currentTime + ". Seeking to " + ("live point (seekable end) " + livePoint));
30933 this.cancelTimer_();
30934 this.tech_.setCurrentTime(livePoint); // live window resyncs may be useful for monitoring QoS
30935
30936 this.tech_.trigger({
30937 type: 'usage',
30938 name: 'vhs-live-resync'
30939 });
30940 this.tech_.trigger({
30941 type: 'usage',
30942 name: 'hls-live-resync'
30943 });
30944 return true;
30945 }
30946
30947 var sourceUpdater = this.tech_.vhs.masterPlaylistController_.sourceUpdater_;
30948 var buffered = this.tech_.buffered();
30949 var videoUnderflow = this.videoUnderflow_({
30950 audioBuffered: sourceUpdater.audioBuffered(),
30951 videoBuffered: sourceUpdater.videoBuffered(),
30952 currentTime: currentTime
30953 });
30954
30955 if (videoUnderflow) {
30956 // Even though the video underflowed and was stuck in a gap, the audio overplayed
30957 // the gap, leading currentTime into a buffered range. Seeking to currentTime
30958 // allows the video to catch up to the audio position without losing any audio
30959 // (only suffering ~3 seconds of frozen video and a pause in audio playback).
30960 this.cancelTimer_();
30961 this.tech_.setCurrentTime(currentTime); // video underflow may be useful for monitoring QoS
30962
30963 this.tech_.trigger({
30964 type: 'usage',
30965 name: 'vhs-video-underflow'
30966 });
30967 this.tech_.trigger({
30968 type: 'usage',
30969 name: 'hls-video-underflow'
30970 });
30971 return true;
30972 }
30973
30974 var nextRange = findNextRange(buffered, currentTime); // check for gap
30975
30976 if (nextRange.length > 0) {
30977 var difference = nextRange.start(0) - currentTime;
30978 this.logger_("Stopped at " + currentTime + ", setting timer for " + difference + ", seeking " + ("to " + nextRange.start(0)));
30979 this.cancelTimer_();
30980 this.timer_ = setTimeout(this.skipTheGap_.bind(this), difference * 1000, currentTime);
30981 return true;
30982 } // All checks failed. Returning false to indicate failure to correct waiting
30983
30984
30985 return false;
30986 };
30987
30988 _proto.afterSeekableWindow_ = function afterSeekableWindow_(seekable, currentTime, playlist, allowSeeksWithinUnsafeLiveWindow) {
30989 if (allowSeeksWithinUnsafeLiveWindow === void 0) {
30990 allowSeeksWithinUnsafeLiveWindow = false;
30991 }
30992
30993 if (!seekable.length) {
30994 // we can't make a solid case if there's no seekable, default to false
30995 return false;
30996 }
30997
30998 var allowedEnd = seekable.end(seekable.length - 1) + SAFE_TIME_DELTA;
30999 var isLive = !playlist.endList;
31000
31001 if (isLive && allowSeeksWithinUnsafeLiveWindow) {
31002 allowedEnd = seekable.end(seekable.length - 1) + playlist.targetDuration * 3;
31003 }
31004
31005 if (currentTime > allowedEnd) {
31006 return true;
31007 }
31008
31009 return false;
31010 };
31011
31012 _proto.beforeSeekableWindow_ = function beforeSeekableWindow_(seekable, currentTime) {
31013 if (seekable.length && // can't fall before 0 and 0 seekable start identifies VOD stream
31014 seekable.start(0) > 0 && currentTime < seekable.start(0) - this.liveRangeSafeTimeDelta) {
31015 return true;
31016 }
31017
31018 return false;
31019 };
31020
31021 _proto.videoUnderflow_ = function videoUnderflow_(_ref) {
31022 var videoBuffered = _ref.videoBuffered,
31023 audioBuffered = _ref.audioBuffered,
31024 currentTime = _ref.currentTime;
31025
31026 // audio only content will not have video underflow :)
31027 if (!videoBuffered) {
31028 return;
31029 }
31030
31031 var gap; // find a gap in demuxed content.
31032
31033 if (videoBuffered.length && audioBuffered.length) {
31034 // in Chrome audio will continue to play for ~3s when we run out of video
31035 // so we have to check that the video buffer did have some buffer in the
31036 // past.
31037 var lastVideoRange = findRange(videoBuffered, currentTime - 3);
31038 var videoRange = findRange(videoBuffered, currentTime);
31039 var audioRange = findRange(audioBuffered, currentTime);
31040
31041 if (audioRange.length && !videoRange.length && lastVideoRange.length) {
31042 gap = {
31043 start: lastVideoRange.end(0),
31044 end: audioRange.end(0)
31045 };
31046 } // find a gap in muxed content.
31047
31048 } else {
31049 var nextRange = findNextRange(videoBuffered, currentTime); // Even if there is no available next range, there is still a possibility we are
31050 // stuck in a gap due to video underflow.
31051
31052 if (!nextRange.length) {
31053 gap = this.gapFromVideoUnderflow_(videoBuffered, currentTime);
31054 }
31055 }
31056
31057 if (gap) {
31058 this.logger_("Encountered a gap in video from " + gap.start + " to " + gap.end + ". " + ("Seeking to current time " + currentTime));
31059 return true;
31060 }
31061
31062 return false;
31063 }
31064 /**
31065 * Timer callback. If playback still has not proceeded, then we seek
31066 * to the start of the next buffered region.
31067 *
31068 * @private
31069 */
31070 ;
31071
31072 _proto.skipTheGap_ = function skipTheGap_(scheduledCurrentTime) {
31073 var buffered = this.tech_.buffered();
31074 var currentTime = this.tech_.currentTime();
31075 var nextRange = findNextRange(buffered, currentTime);
31076 this.cancelTimer_();
31077
31078 if (nextRange.length === 0 || currentTime !== scheduledCurrentTime) {
31079 return;
31080 }
31081
31082 this.logger_('skipTheGap_:', 'currentTime:', currentTime, 'scheduled currentTime:', scheduledCurrentTime, 'nextRange start:', nextRange.start(0)); // only seek if we still have not played
31083
31084 this.tech_.setCurrentTime(nextRange.start(0) + TIME_FUDGE_FACTOR);
31085 this.tech_.trigger({
31086 type: 'usage',
31087 name: 'vhs-gap-skip'
31088 });
31089 this.tech_.trigger({
31090 type: 'usage',
31091 name: 'hls-gap-skip'
31092 });
31093 };
31094
31095 _proto.gapFromVideoUnderflow_ = function gapFromVideoUnderflow_(buffered, currentTime) {
31096 // At least in Chrome, if there is a gap in the video buffer, the audio will continue
31097 // playing for ~3 seconds after the video gap starts. This is done to account for
31098 // video buffer underflow/underrun (note that this is not done when there is audio
31099 // buffer underflow/underrun -- in that case the video will stop as soon as it
31100 // encounters the gap, as audio stalls are more noticeable/jarring to a user than
31101 // video stalls). The player's time will reflect the playthrough of audio, so the
31102 // time will appear as if we are in a buffered region, even if we are stuck in a
31103 // "gap."
31104 //
31105 // Example:
31106 // video buffer: 0 => 10.1, 10.2 => 20
31107 // audio buffer: 0 => 20
31108 // overall buffer: 0 => 10.1, 10.2 => 20
31109 // current time: 13
31110 //
31111 // Chrome's video froze at 10 seconds, where the video buffer encountered the gap,
31112 // however, the audio continued playing until it reached ~3 seconds past the gap
31113 // (13 seconds), at which point it stops as well. Since current time is past the
31114 // gap, findNextRange will return no ranges.
31115 //
31116 // To check for this issue, we see if there is a gap that starts somewhere within
31117 // a 3 second range (3 seconds +/- 1 second) back from our current time.
31118 var gaps = findGaps(buffered);
31119
31120 for (var i = 0; i < gaps.length; i++) {
31121 var start = gaps.start(i);
31122 var end = gaps.end(i); // gap starts no more than 4 seconds back
31123
31124 if (currentTime - start < 4 && currentTime - start > 2) {
31125 return {
31126 start: start,
31127 end: end
31128 };
31129 }
31130 }
31131
31132 return null;
31133 };
31134
31135 return PlaybackWatcher;
31136 }();
31137
31138 var defaultOptions = {
31139 errorInterval: 30,
31140 getSource: function getSource(next) {
31141 var tech = this.tech({
31142 IWillNotUseThisInPlugins: true
31143 });
31144 var sourceObj = tech.currentSource_ || this.currentSource();
31145 return next(sourceObj);
31146 }
31147 };
31148 /**
31149 * Main entry point for the plugin
31150 *
31151 * @param {Player} player a reference to a videojs Player instance
31152 * @param {Object} [options] an object with plugin options
31153 * @private
31154 */
31155
31156 var initPlugin = function initPlugin(player, options) {
31157 var lastCalled = 0;
31158 var seekTo = 0;
31159 var localOptions = videojs__default["default"].mergeOptions(defaultOptions, options);
31160 player.ready(function () {
31161 player.trigger({
31162 type: 'usage',
31163 name: 'vhs-error-reload-initialized'
31164 });
31165 player.trigger({
31166 type: 'usage',
31167 name: 'hls-error-reload-initialized'
31168 });
31169 });
31170 /**
31171 * Player modifications to perform that must wait until `loadedmetadata`
31172 * has been triggered
31173 *
31174 * @private
31175 */
31176
31177 var loadedMetadataHandler = function loadedMetadataHandler() {
31178 if (seekTo) {
31179 player.currentTime(seekTo);
31180 }
31181 };
31182 /**
31183 * Set the source on the player element, play, and seek if necessary
31184 *
31185 * @param {Object} sourceObj An object specifying the source url and mime-type to play
31186 * @private
31187 */
31188
31189
31190 var setSource = function setSource(sourceObj) {
31191 if (sourceObj === null || sourceObj === undefined) {
31192 return;
31193 }
31194
31195 seekTo = player.duration() !== Infinity && player.currentTime() || 0;
31196 player.one('loadedmetadata', loadedMetadataHandler);
31197 player.src(sourceObj);
31198 player.trigger({
31199 type: 'usage',
31200 name: 'vhs-error-reload'
31201 });
31202 player.trigger({
31203 type: 'usage',
31204 name: 'hls-error-reload'
31205 });
31206 player.play();
31207 };
31208 /**
31209 * Attempt to get a source from either the built-in getSource function
31210 * or a custom function provided via the options
31211 *
31212 * @private
31213 */
31214
31215
31216 var errorHandler = function errorHandler() {
31217 // Do not attempt to reload the source if a source-reload occurred before
31218 // 'errorInterval' time has elapsed since the last source-reload
31219 if (Date.now() - lastCalled < localOptions.errorInterval * 1000) {
31220 player.trigger({
31221 type: 'usage',
31222 name: 'vhs-error-reload-canceled'
31223 });
31224 player.trigger({
31225 type: 'usage',
31226 name: 'hls-error-reload-canceled'
31227 });
31228 return;
31229 }
31230
31231 if (!localOptions.getSource || typeof localOptions.getSource !== 'function') {
31232 videojs__default["default"].log.error('ERROR: reloadSourceOnError - The option getSource must be a function!');
31233 return;
31234 }
31235
31236 lastCalled = Date.now();
31237 return localOptions.getSource.call(player, setSource);
31238 };
31239 /**
31240 * Unbind any event handlers that were bound by the plugin
31241 *
31242 * @private
31243 */
31244
31245
31246 var cleanupEvents = function cleanupEvents() {
31247 player.off('loadedmetadata', loadedMetadataHandler);
31248 player.off('error', errorHandler);
31249 player.off('dispose', cleanupEvents);
31250 };
31251 /**
31252 * Cleanup before re-initializing the plugin
31253 *
31254 * @param {Object} [newOptions] an object with plugin options
31255 * @private
31256 */
31257
31258
31259 var reinitPlugin = function reinitPlugin(newOptions) {
31260 cleanupEvents();
31261 initPlugin(player, newOptions);
31262 };
31263
31264 player.on('error', errorHandler);
31265 player.on('dispose', cleanupEvents); // Overwrite the plugin function so that we can correctly clean up before
31266 // initializing the plugin
31267
31268 player.reloadSourceOnError = reinitPlugin;
31269 };
31270 /**
31271 * Reload the source when an error is detected as long as there
31272 * wasn't an error previously within the last 30 seconds
31273 *
31274 * @param {Object} [options] an object with plugin options
31275 */
31276
31277
31278 var reloadSourceOnError = function reloadSourceOnError(options) {
31279 initPlugin(this, options);
31280 };
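// A usage sketch of the plugin (an illustration, not part of the library: the
// player instance and manifest URL are assumed). `getSource` receives a `next`
// callback that must be invoked with the source object to reload:
//
//   var player = videojs('my-video'); // assumed to exist on the page
//   player.reloadSourceOnError({
//     // wait at least 10 seconds between reload attempts
//     errorInterval: 10,
//     getSource: function (next) {
//       next({ src: 'https://example.com/index.m3u8', type: 'application/x-mpegURL' });
//     }
//   });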
31281
31282 var version$4 = "2.16.2";
31283
31284 var version$3 = "6.0.1";
31285
31286 var version$2 = "0.22.1";
31287
31288 var version$1 = "4.8.0";
31289
31290 var version = "3.1.3";
31291
31292 var Vhs = {
31293 PlaylistLoader: PlaylistLoader,
31294 Playlist: Playlist,
31295 utils: utils,
31296 STANDARD_PLAYLIST_SELECTOR: lastBandwidthSelector,
31297 INITIAL_PLAYLIST_SELECTOR: lowestBitrateCompatibleVariantSelector,
31298 lastBandwidthSelector: lastBandwidthSelector,
31299 movingAverageBandwidthSelector: movingAverageBandwidthSelector,
31300 comparePlaylistBandwidth: comparePlaylistBandwidth,
31301 comparePlaylistResolution: comparePlaylistResolution,
31302 xhr: xhrFactory()
31303 }; // Define getters/setters for config properties
31304
31305 Object.keys(Config).forEach(function (prop) {
31306 Object.defineProperty(Vhs, prop, {
31307 get: function get() {
31308 videojs__default["default"].log.warn("using Vhs." + prop + " is UNSAFE; be sure you know what you are doing");
31309 return Config[prop];
31310 },
31311 set: function set(value) {
31312 videojs__default["default"].log.warn("using Vhs." + prop + " is UNSAFE; be sure you know what you are doing");
31313
31314 if (typeof value !== 'number' || value < 0) {
31315 videojs__default["default"].log.warn("value of Vhs." + prop + " must be greater than or equal to 0");
31316 return;
31317 }
31318
31319 Config[prop] = value;
31320 }
31321 });
31322 });
31323 var LOCAL_STORAGE_KEY = 'videojs-vhs';
31324 /**
31325 * Updates the selectedIndex of the QualityLevelList when a mediachange happens in vhs.
31326 *
31327 * @param {QualityLevelList} qualityLevels The QualityLevelList to update.
31328 * @param {PlaylistLoader} playlistLoader PlaylistLoader containing the new media info.
31329 * @function handleVhsMediaChange
31330 */
31331
31332 var handleVhsMediaChange = function handleVhsMediaChange(qualityLevels, playlistLoader) {
31333 var newPlaylist = playlistLoader.media();
31334 var selectedIndex = -1;
31335
31336 for (var i = 0; i < qualityLevels.length; i++) {
31337 if (qualityLevels[i].id === newPlaylist.id) {
31338 selectedIndex = i;
31339 break;
31340 }
31341 }
31342
31343 qualityLevels.selectedIndex_ = selectedIndex;
31344 qualityLevels.trigger({
31345 selectedIndex: selectedIndex,
31346 type: 'change'
31347 });
31348 };
31349 /**
31350 * Adds quality levels to list once playlist metadata is available
31351 *
31352 * @param {QualityLevelList} qualityLevels The QualityLevelList to attach events to.
31353 * @param {Object} vhs Vhs object to listen to for media events.
31354 * @function handleVhsLoadedMetadata
31355 */
31356
31357
31358 var handleVhsLoadedMetadata = function handleVhsLoadedMetadata(qualityLevels, vhs) {
31359 vhs.representations().forEach(function (rep) {
31360 qualityLevels.addQualityLevel(rep);
31361 });
31362 handleVhsMediaChange(qualityLevels, vhs.playlists);
31363 }; // HLS is a source handler, not a tech. Make sure attempts to use it
31364 // as one do not cause exceptions.
31365
31366
31367 Vhs.canPlaySource = function () {
31368 return videojs__default["default"].log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
31369 };
31370
31371 var emeKeySystems = function emeKeySystems(keySystemOptions, mainPlaylist, audioPlaylist) {
31372 if (!keySystemOptions) {
31373 return keySystemOptions;
31374 }
31375
31376 var codecs = {};
31377
31378 if (mainPlaylist && mainPlaylist.attributes && mainPlaylist.attributes.CODECS) {
31379 codecs = unwrapCodecList(parseCodecs(mainPlaylist.attributes.CODECS));
31380 }
31381
31382 if (audioPlaylist && audioPlaylist.attributes && audioPlaylist.attributes.CODECS) {
31383 codecs.audio = audioPlaylist.attributes.CODECS;
31384 }
31385
31386 var videoContentType = getMimeForCodec(codecs.video);
31387 var audioContentType = getMimeForCodec(codecs.audio); // upsert the content types based on the selected playlist
31388
31389 var keySystemContentTypes = {};
31390
31391 for (var keySystem in keySystemOptions) {
31392 keySystemContentTypes[keySystem] = {};
31393
31394 if (audioContentType) {
31395 keySystemContentTypes[keySystem].audioContentType = audioContentType;
31396 }
31397
31398 if (videoContentType) {
31399 keySystemContentTypes[keySystem].videoContentType = videoContentType;
31400 } // Default to using the video playlist's PSSH even though they may be different, as
31401 // videojs-contrib-eme will only accept one in the options.
31402 //
31403 // This shouldn't be an issue for most cases as early initialization will handle all
31404 // unique PSSH values, and if they aren't handled, then encrypted events should have the
31405 // specific information needed for the unique license.
31406
31407
31408 if (mainPlaylist.contentProtection && mainPlaylist.contentProtection[keySystem] && mainPlaylist.contentProtection[keySystem].pssh) {
31409 keySystemContentTypes[keySystem].pssh = mainPlaylist.contentProtection[keySystem].pssh;
31410 } // videojs-contrib-eme accepts the option of specifying: 'com.some.cdm': 'url'
31411 // so we need to prevent overwriting the URL entirely
31412
31413
31414 if (typeof keySystemOptions[keySystem] === 'string') {
31415 keySystemContentTypes[keySystem].url = keySystemOptions[keySystem];
31416 }
31417 }
31418
31419 return videojs__default["default"].mergeOptions(keySystemOptions, keySystemContentTypes);
31420 };
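// A minimal illustration of the expansion above (hedged: the playlist objects
// are simplified, and the exact MIME strings come from getMimeForCodec):
//
//   emeKeySystems(
//     { 'com.widevine.alpha': { url: 'https://example.com/license' } },
//     { attributes: { CODECS: 'avc1.4d400d,mp4a.40.2' } }
//   );
//   // => roughly { 'com.widevine.alpha': { url: 'https://example.com/license',
//   //      videoContentType: 'video/mp4;codecs="avc1.4d400d"',
//   //      audioContentType: 'audio/mp4;codecs="mp4a.40.2"' } }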
31421 /**
31422 * @typedef {Object} KeySystems
31423 *
31424 * keySystems configuration for https://github.com/videojs/videojs-contrib-eme
31425 * Note: not all options are listed here.
31426 *
31427 * @property {Uint8Array} [pssh]
31428 * Protection System Specific Header
31429 */
31430
31431 /**
31432 * Goes through all the playlists and collects an array of KeySystems options objects
31433 * containing each playlist's keySystems and their pssh values, if available.
31434 *
31435 * @param {Object[]} playlists
31436 * The playlists to look through
31437 * @param {string[]} keySystems
31438 * The keySystems to collect pssh values for
31439 *
31440 * @return {KeySystems[]}
31441 * An array of KeySystems objects containing available key systems and their
31442 * pssh values
31443 */
31444
31445
31446 var getAllPsshKeySystemsOptions = function getAllPsshKeySystemsOptions(playlists, keySystems) {
31447 return playlists.reduce(function (keySystemsArr, playlist) {
31448 if (!playlist.contentProtection) {
31449 return keySystemsArr;
31450 }
31451
31452 var keySystemsOptions = keySystems.reduce(function (keySystemsObj, keySystem) {
31453 var keySystemOptions = playlist.contentProtection[keySystem];
31454
31455 if (keySystemOptions && keySystemOptions.pssh) {
31456 keySystemsObj[keySystem] = {
31457 pssh: keySystemOptions.pssh
31458 };
31459 }
31460
31461 return keySystemsObj;
31462 }, {});
31463
31464 if (Object.keys(keySystemsOptions).length) {
31465 keySystemsArr.push(keySystemsOptions);
31466 }
31467
31468 return keySystemsArr;
31469 }, []);
31470 };
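// A shape sketch (hedged: real pssh values are Uint8Arrays extracted from the
// manifest, abbreviated here):
//
//   getAllPsshKeySystemsOptions(
//     [
//       { contentProtection: { 'com.widevine.alpha': { pssh: new Uint8Array([/* ... */]) } } },
//       { /* no contentProtection, contributes nothing */ }
//     ],
//     ['com.widevine.alpha']
//   );
//   // => [{ 'com.widevine.alpha': { pssh: Uint8Array [...] } }]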
31471 /**
31472 * Returns a promise that waits for the
31473 * [eme plugin](https://github.com/videojs/videojs-contrib-eme) to create a key session.
31474 *
31475 * Works around https://bugs.chromium.org/p/chromium/issues/detail?id=895449 in non-IE11
31476 * browsers.
31477 *
31478 * As per the above ticket, this is particularly important for Chrome, where, if
31479 * unencrypted content is appended before encrypted content and the key session has not
31480 * been created, a MEDIA_ERR_DECODE will be thrown once the encrypted content is reached
31481 * during playback.
31482 *
31483 * @param {Object} player
31484 * The player instance
31485 * @param {Object[]} sourceKeySystems
31486 * The key systems options from the player source
31487 * @param {Object} [audioMedia]
31488 * The active audio media playlist (optional)
31489 * @param {Object[]} mainPlaylists
31490 * The playlists found on the master playlist object
31491 *
31492 * @return {Object}
31493 * Promise that resolves when the key session has been created
31494 */
31495
31496
31497 var waitForKeySessionCreation = function waitForKeySessionCreation(_ref) {
31498 var player = _ref.player,
31499 sourceKeySystems = _ref.sourceKeySystems,
31500 audioMedia = _ref.audioMedia,
31501 mainPlaylists = _ref.mainPlaylists;
31502
31503 if (!player.eme.initializeMediaKeys) {
31504 return Promise.resolve();
31505 } // TODO should all audio PSSH values be initialized for DRM?
31506 //
31507 // All unique video rendition pssh values are initialized for DRM, but here only
31508 // the initial audio playlist license is initialized. In theory, an encrypted
31509 // event should be fired if the user switches to an alternative audio playlist
31510 // where a license is required, but this case hasn't yet been tested. In addition, there
31511 // may be many alternate audio playlists unlikely to be used (e.g., multiple different
31512 // languages).
31513
31514
31515 var playlists = audioMedia ? mainPlaylists.concat([audioMedia]) : mainPlaylists;
31516 var keySystemsOptionsArr = getAllPsshKeySystemsOptions(playlists, Object.keys(sourceKeySystems));
31517 var initializationFinishedPromises = [];
31518 var keySessionCreatedPromises = []; // Since PSSH values are interpreted as initData, EME will dedupe any duplicates. The
31519 // only place where it should not be deduped is for ms-prefixed APIs, but the early
31520 // return for IE11 above, and the existence of modern EME APIs in addition to
31521 // ms-prefixed APIs on Edge should prevent this from being a concern.
31522 // initializeMediaKeys also won't use the webkit-prefixed APIs.
31523
31524 keySystemsOptionsArr.forEach(function (keySystemsOptions) {
31525 keySessionCreatedPromises.push(new Promise(function (resolve, reject) {
31526 player.tech_.one('keysessioncreated', resolve);
31527 }));
31528 initializationFinishedPromises.push(new Promise(function (resolve, reject) {
31529 player.eme.initializeMediaKeys({
31530 keySystems: keySystemsOptions
31531 }, function (err) {
31532 if (err) {
31533 reject(err);
31534 return;
31535 }
31536
31537 resolve();
31538 });
31539 }));
31540 }); // The reasons Promise.race is chosen over Promise.any:
31541 //
31542 // * Promise.any is only available in Safari 14+.
31543 // * None of these promises are expected to reject. If they do reject, it might be
31544 // better here for the race to surface the rejection, rather than mask it by using
31545 // Promise.any.
31546
31547 return Promise.race([// If a session was previously created, these will all finish resolving without
31548 // creating a new session, otherwise it will take until the end of all license
31549 // requests, which is why the key session check is used (to make setup much faster).
31550 Promise.all(initializationFinishedPromises), // Once a single session is created, the browser knows DRM will be used.
31551 Promise.race(keySessionCreatedPromises)]);
31552 };
31553 /**
31554 * If the [eme](https://github.com/videojs/videojs-contrib-eme) plugin is available, and
31555 * there are keySystems on the source, sets up source options to prepare the source for
31556 * eme.
31557 *
31558 * @param {Object} player
31559 * The player instance
31560 * @param {Object[]} sourceKeySystems
31561 * The key systems options from the player source
31562 * @param {Object} media
31563 * The active media playlist
31564 * @param {Object} [audioMedia]
31565 * The active audio media playlist (optional)
31566 *
31567 * @return {boolean}
31568 * Whether or not options were configured and EME is available
31569 */
31570
31571 var setupEmeOptions = function setupEmeOptions(_ref2) {
31572 var player = _ref2.player,
31573 sourceKeySystems = _ref2.sourceKeySystems,
31574 media = _ref2.media,
31575 audioMedia = _ref2.audioMedia;
31576 var sourceOptions = emeKeySystems(sourceKeySystems, media, audioMedia);
31577
31578 if (!sourceOptions) {
31579 return false;
31580 }
31581
31582 player.currentSource().keySystems = sourceOptions; // eme handles the rest of the setup, so if it is missing
31583 // do nothing.
31584
31585 if (sourceOptions && !player.eme) {
31586 videojs__default["default"].log.warn('DRM encrypted source cannot be decrypted without a DRM plugin');
31587 return false;
31588 }
31589
31590 return true;
31591 };
31592
31593 var getVhsLocalStorage = function getVhsLocalStorage() {
31594 if (!window.localStorage) {
31595 return null;
31596 }
31597
31598 var storedObject = window.localStorage.getItem(LOCAL_STORAGE_KEY);
31599
31600 if (!storedObject) {
31601 return null;
31602 }
31603
31604 try {
31605 return JSON.parse(storedObject);
31606 } catch (e) {
31607 // someone may have tampered with the value
31608 return null;
31609 }
31610 };
31611
31612 var updateVhsLocalStorage = function updateVhsLocalStorage(options) {
31613 if (!window.localStorage) {
31614 return false;
31615 }
31616
31617 var objectToStore = getVhsLocalStorage();
31618 objectToStore = objectToStore ? videojs__default["default"].mergeOptions(objectToStore, options) : options;
31619
31620 try {
31621 window.localStorage.setItem(LOCAL_STORAGE_KEY, JSON.stringify(objectToStore));
31622 } catch (e) {
31623 // Throws if storage is full (e.g., always on iOS 5+ Safari private mode, where
31624 // storage is set to 0).
31625 // https://developer.mozilla.org/en-US/docs/Web/API/Storage/setItem#Exceptions
31626 // No need to perform any operation.
31627 return false;
31628 }
31629
31630 return objectToStore;
31631 };
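// A sketch of what ends up under the 'videojs-vhs' key (assuming nothing was
// stored previously; later in this file the object is written with `bandwidth`
// and `throughput` on 'bandwidthupdate' events):
//
//   updateVhsLocalStorage({ bandwidth: 4194304, throughput: 3145728 });
//   // window.localStorage.getItem('videojs-vhs')
//   // => '{"bandwidth":4194304,"throughput":3145728}'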
31632 /**
31633 * Parses VHS-supported media types from data URIs. See
31634 * https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs
31635 * for information on data URIs.
31636 *
31637 * @param {string} dataUri
31638 * The data URI
31639 *
31640 * @return {string|Object}
31641 * The parsed object/string, or the original string if no supported media type
31642 * was found
31643 */
31644
31645
31646 var expandDataUri = function expandDataUri(dataUri) {
31647 if (dataUri.toLowerCase().indexOf('data:application/vnd.videojs.vhs+json,') === 0) {
31648 return JSON.parse(dataUri.substring(dataUri.indexOf(',') + 1));
31649 } // no known case for this data URI, return the string as-is
31650
31651
31652 return dataUri;
31653 };
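// A usage sketch (the JSON body is an arbitrary stand-in, not a full VHS
// manifest object):
//
//   expandDataUri('data:application/vnd.videojs.vhs+json,{"bandwidth":1024}');
//   // => { bandwidth: 1024 }
//
//   expandDataUri('https://example.com/index.m3u8');
//   // => 'https://example.com/index.m3u8' (returned as-is)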
31654 /**
31655 * Whether the browser has built-in HLS support.
31656 */
31657
31658
31659 Vhs.supportsNativeHls = function () {
31660 if (!document || !document.createElement) {
31661 return false;
31662 }
31663
31664 var video = document.createElement('video'); // native HLS is definitely not supported if HTML5 video isn't
31665
31666 if (!videojs__default["default"].getTech('Html5').isSupported()) {
31667 return false;
31668 } // HLS manifests can go by many mime-types
31669
31670
31671 var canPlay = [// Apple sanctioned
31672 'application/vnd.apple.mpegurl', // Apple sanctioned for backwards compatibility
31673 'audio/mpegurl', // Very common
31674 'audio/x-mpegurl', // Very common
31675 'application/x-mpegurl', // Included for completeness
31676 'video/x-mpegurl', 'video/mpegurl', 'application/mpegurl'];
31677 return canPlay.some(function (canItPlay) {
31678 return /maybe|probably/i.test(video.canPlayType(canItPlay));
31679 });
31680 }();
31681
31682 Vhs.supportsNativeDash = function () {
31683 if (!document || !document.createElement || !videojs__default["default"].getTech('Html5').isSupported()) {
31684 return false;
31685 }
31686
31687 return /maybe|probably/i.test(document.createElement('video').canPlayType('application/dash+xml'));
31688 }();
31689
31690 Vhs.supportsTypeNatively = function (type) {
31691 if (type === 'hls') {
31692 return Vhs.supportsNativeHls;
31693 }
31694
31695 if (type === 'dash') {
31696 return Vhs.supportsNativeDash;
31697 }
31698
31699 return false;
31700 };
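// For example (results vary by browser; Safari typically reports native HLS
// support, while most desktop Chrome and Firefox builds do not):
//
//   Vhs.supportsTypeNatively('hls');  // boolean, from the canPlayType probes above
//   Vhs.supportsTypeNatively('webm'); // false -- only 'hls' and 'dash' are recognized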
31701 /**
31702 * HLS is a source handler, not a tech. Make sure attempts to use it
31703 * as one do not cause exceptions.
31704 */
31705
31706
31707 Vhs.isSupported = function () {
31708 return videojs__default["default"].log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
31709 };
31710
31711 var Component = videojs__default["default"].getComponent('Component');
31712 /**
31713 * The Vhs Handler object, where we orchestrate all of the parts
31714 * of HLS to interact with video.js
31715 *
31716 * @class VhsHandler
31717 * @extends videojs.Component
31718 * @param {Object} source the source object
31719 * @param {Tech} tech the parent tech object
31720 * @param {Object} options the options object, containing both optional and required settings
31721 */
31722
31723 var VhsHandler = /*#__PURE__*/function (_Component) {
31724 inheritsLoose(VhsHandler, _Component);
31725
31726 function VhsHandler(source, tech, options) {
31727 var _this;
31728
31729 _this = _Component.call(this, tech, videojs__default["default"].mergeOptions(options.hls, options.vhs)) || this;
31730
31731 if (options.hls && Object.keys(options.hls).length) {
31732 videojs__default["default"].log.warn('Using hls options is deprecated. Please rename `hls` to `vhs` in your options object.');
31733 } // if a tech level `initialBandwidth` option was passed
31734 // use that over the VHS level `bandwidth` option
31735
31736
31737 if (typeof options.initialBandwidth === 'number') {
31738 _this.options_.bandwidth = options.initialBandwidth;
31739 }
31740
31741 _this.logger_ = logger('VhsHandler'); // tech.player() is deprecated but set up a reference to HLS for
31742 // backwards-compatibility
31743
31744 if (tech.options_ && tech.options_.playerId) {
31745 var _player = videojs__default["default"](tech.options_.playerId);
31746
31747 if (!_player.hasOwnProperty('hls')) {
31748 Object.defineProperty(_player, 'hls', {
31749 get: function get() {
31750 videojs__default["default"].log.warn('player.hls is deprecated. Use player.tech().vhs instead.');
31751 tech.trigger({
31752 type: 'usage',
31753 name: 'hls-player-access'
31754 });
31755 return assertThisInitialized(_this);
31756 },
31757 configurable: true
31758 });
31759 }
31760
31761 if (!_player.hasOwnProperty('vhs')) {
31762 Object.defineProperty(_player, 'vhs', {
31763 get: function get() {
31764 videojs__default["default"].log.warn('player.vhs is deprecated. Use player.tech().vhs instead.');
31765 tech.trigger({
31766 type: 'usage',
31767 name: 'vhs-player-access'
31768 });
31769 return assertThisInitialized(_this);
31770 },
31771 configurable: true
31772 });
31773 }
31774
31775 if (!_player.hasOwnProperty('dash')) {
31776 Object.defineProperty(_player, 'dash', {
31777 get: function get() {
31778 videojs__default["default"].log.warn('player.dash is deprecated. Use player.tech().vhs instead.');
31779 return assertThisInitialized(_this);
31780 },
31781 configurable: true
31782 });
31783 }
31784
31785 _this.player_ = _player;
31786 }
31787
31788 _this.tech_ = tech;
31789 _this.source_ = source;
31790 _this.stats = {};
31791 _this.ignoreNextSeekingEvent_ = false;
31792
31793 _this.setOptions_();
31794
31795 if (_this.options_.overrideNative && tech.overrideNativeAudioTracks && tech.overrideNativeVideoTracks) {
31796 tech.overrideNativeAudioTracks(true);
31797 tech.overrideNativeVideoTracks(true);
31798 } else if (_this.options_.overrideNative && (tech.featuresNativeVideoTracks || tech.featuresNativeAudioTracks)) {
31799 // overriding native HLS only works if audio tracks have been emulated
31800 // error early if we're misconfigured
31801 throw new Error('Overriding native HLS requires emulated tracks. ' + 'See https://git.io/vMpjB');
31802 } // listen for fullscreenchange events for this player so that we
31803 // can adjust our quality selection quickly
31804
31805
31806 _this.on(document, ['fullscreenchange', 'webkitfullscreenchange', 'mozfullscreenchange', 'MSFullscreenChange'], function (event) {
31807 var fullscreenElement = document.fullscreenElement || document.webkitFullscreenElement || document.mozFullScreenElement || document.msFullscreenElement;
31808
31809 if (fullscreenElement && fullscreenElement.contains(_this.tech_.el())) {
31810 _this.masterPlaylistController_.fastQualityChange_();
31811 } else {
31812 // When leaving fullscreen, since the in page pixel dimensions should be smaller
31813 // than full screen, see if there should be a rendition switch down to preserve
31814 // bandwidth.
31815 _this.masterPlaylistController_.checkABR_();
31816 }
31817 });
31818
31819 _this.on(_this.tech_, 'seeking', function () {
31820 if (this.ignoreNextSeekingEvent_) {
31821 this.ignoreNextSeekingEvent_ = false;
31822 return;
31823 }
31824
31825 this.setCurrentTime(this.tech_.currentTime());
31826 });
31827
31828 _this.on(_this.tech_, 'error', function () {
31829 // verify that the error was real and we are loaded
31830 // enough to have mpc loaded.
31831 if (this.tech_.error() && this.masterPlaylistController_) {
31832 this.masterPlaylistController_.pauseLoading();
31833 }
31834 });
31835
31836 _this.on(_this.tech_, 'play', _this.play);
31837
31838 return _this;
31839 }
31840
31841 var _proto = VhsHandler.prototype;
31842
31843 _proto.setOptions_ = function setOptions_() {
31844 var _this2 = this;
31845
31846 // defaults
31847 this.options_.withCredentials = this.options_.withCredentials || false;
31848 this.options_.handleManifestRedirects = this.options_.handleManifestRedirects === false ? false : true;
31849 this.options_.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions === false ? false : true;
31850 this.options_.useDevicePixelRatio = this.options_.useDevicePixelRatio || false;
31851 this.options_.smoothQualityChange = this.options_.smoothQualityChange || false;
31852 this.options_.useBandwidthFromLocalStorage = typeof this.source_.useBandwidthFromLocalStorage !== 'undefined' ? this.source_.useBandwidthFromLocalStorage : this.options_.useBandwidthFromLocalStorage || false;
31853 this.options_.useNetworkInformationApi = this.options_.useNetworkInformationApi || false;
31854 this.options_.useDtsForTimestampOffset = this.options_.useDtsForTimestampOffset || false;
31855 this.options_.customTagParsers = this.options_.customTagParsers || [];
31856 this.options_.customTagMappers = this.options_.customTagMappers || [];
31857 this.options_.cacheEncryptionKeys = this.options_.cacheEncryptionKeys || false;
31858
31859 if (typeof this.options_.blacklistDuration !== 'number') {
31860 this.options_.blacklistDuration = 5 * 60;
31861 }
31862
31863 if (typeof this.options_.bandwidth !== 'number') {
31864 if (this.options_.useBandwidthFromLocalStorage) {
31865 var storedObject = getVhsLocalStorage();
31866
31867 if (storedObject && storedObject.bandwidth) {
31868 this.options_.bandwidth = storedObject.bandwidth;
31869 this.tech_.trigger({
31870 type: 'usage',
31871 name: 'vhs-bandwidth-from-local-storage'
31872 });
31873 this.tech_.trigger({
31874 type: 'usage',
31875 name: 'hls-bandwidth-from-local-storage'
31876 });
31877 }
31878
31879 if (storedObject && storedObject.throughput) {
31880 this.options_.throughput = storedObject.throughput;
31881 this.tech_.trigger({
31882 type: 'usage',
31883 name: 'vhs-throughput-from-local-storage'
31884 });
31885 this.tech_.trigger({
31886 type: 'usage',
31887 name: 'hls-throughput-from-local-storage'
31888 });
31889 }
31890 }
31891 } // if bandwidth was not set by options or pulled from local storage, start playlist
31892 // selection at a reasonable bandwidth
31893
31894
31895 if (typeof this.options_.bandwidth !== 'number') {
31896 this.options_.bandwidth = Config.INITIAL_BANDWIDTH;
31897 } // enableLowInitialPlaylist is only honored when the bandwidth number is
31898 // unchanged from the initial setting; an explicitly set bandwidth takes precedence
31899
31900
31901 this.options_.enableLowInitialPlaylist = this.options_.enableLowInitialPlaylist && this.options_.bandwidth === Config.INITIAL_BANDWIDTH; // grab options passed to player.src
31902
31903 ['withCredentials', 'useDevicePixelRatio', 'limitRenditionByPlayerDimensions', 'bandwidth', 'smoothQualityChange', 'customTagParsers', 'customTagMappers', 'handleManifestRedirects', 'cacheEncryptionKeys', 'playlistSelector', 'initialPlaylistSelector', 'experimentalBufferBasedABR', 'liveRangeSafeTimeDelta', 'experimentalLLHLS', 'useNetworkInformationApi', 'useDtsForTimestampOffset', 'experimentalExactManifestTimings', 'experimentalLeastPixelDiffSelector'].forEach(function (option) {
31904 if (typeof _this2.source_[option] !== 'undefined') {
31905 _this2.options_[option] = _this2.source_[option];
31906 }
31907 });
31908 this.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions;
31909 this.useDevicePixelRatio = this.options_.useDevicePixelRatio;
31910 }
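// Most of the options read above can also be supplied per-source (a sketch;
// the URL is illustrative and the option names are taken from the list in
// setOptions_ and from the useBandwidthFromLocalStorage handling above):
//
//   player.src({
//     src: 'https://example.com/index.m3u8',
//     type: 'application/x-mpegURL',
//     withCredentials: true,
//     useBandwidthFromLocalStorage: true
//   });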
31911 /**
31912 * called when player.src gets called, handle a new source
31913 *
31914 * @param {Object} src the source object to handle
31915 */
31916 ;
31917
31918 _proto.src = function src(_src, type) {
31919 var _this3 = this;
31920
31921 // do nothing if the src is falsey
31922 if (!_src) {
31923 return;
31924 }
31925
31926 this.setOptions_(); // add master playlist controller options
31927
31928 this.options_.src = expandDataUri(this.source_.src);
31929 this.options_.tech = this.tech_;
31930 this.options_.externVhs = Vhs;
31931 this.options_.sourceType = simpleTypeFromSourceType(type); // Whenever we seek internally, we should update the tech
31932
31933 this.options_.seekTo = function (time) {
31934 _this3.tech_.setCurrentTime(time);
31935 };
31936
31937 if (this.options_.smoothQualityChange) {
31938 videojs__default["default"].log.warn('smoothQualityChange is deprecated and will be removed in the next major version');
31939 }
31940
31941 this.masterPlaylistController_ = new MasterPlaylistController(this.options_);
31942 var playbackWatcherOptions = videojs__default["default"].mergeOptions({
31943 liveRangeSafeTimeDelta: SAFE_TIME_DELTA
31944 }, this.options_, {
31945 seekable: function seekable() {
31946 return _this3.seekable();
31947 },
31948 media: function media() {
31949 return _this3.masterPlaylistController_.media();
31950 },
31951 masterPlaylistController: this.masterPlaylistController_
31952 });
31953 this.playbackWatcher_ = new PlaybackWatcher(playbackWatcherOptions);
31954 this.masterPlaylistController_.on('error', function () {
31955 var player = videojs__default["default"].players[_this3.tech_.options_.playerId];
31956 var error = _this3.masterPlaylistController_.error;
31957
31958 if (typeof error === 'object' && !error.code) {
31959 error.code = 3;
31960 } else if (typeof error === 'string') {
31961 error = {
31962 message: error,
31963 code: 3
31964 };
31965 }
31966
31967 player.error(error);
31968 });
31969 var defaultSelector = this.options_.experimentalBufferBasedABR ? Vhs.movingAverageBandwidthSelector(0.55) : Vhs.STANDARD_PLAYLIST_SELECTOR; // `this` in selectPlaylist should be the VhsHandler for backwards
31970 // compatibility with < v2
31971
31972 this.masterPlaylistController_.selectPlaylist = this.selectPlaylist ? this.selectPlaylist.bind(this) : defaultSelector.bind(this);
31973 this.masterPlaylistController_.selectInitialPlaylist = Vhs.INITIAL_PLAYLIST_SELECTOR.bind(this); // re-expose some internal objects for backwards compatibility with < v2
31974
31975 this.playlists = this.masterPlaylistController_.masterPlaylistLoader_;
31976 this.mediaSource = this.masterPlaylistController_.mediaSource; // Proxy assignment of some properties to the master playlist
31977 // controller. Using a custom property for backwards compatibility
31978 // with < v2
31979
31980 Object.defineProperties(this, {
31981 selectPlaylist: {
31982 get: function get() {
31983 return this.masterPlaylistController_.selectPlaylist;
31984 },
31985 set: function set(selectPlaylist) {
31986 this.masterPlaylistController_.selectPlaylist = selectPlaylist.bind(this);
31987 }
31988 },
31989 throughput: {
31990 get: function get() {
31991 return this.masterPlaylistController_.mainSegmentLoader_.throughput.rate;
31992 },
31993 set: function set(throughput) {
31994 this.masterPlaylistController_.mainSegmentLoader_.throughput.rate = throughput; // By setting `count` to 1 the throughput value becomes the starting value
31995 // for the cumulative average
31996
31997 this.masterPlaylistController_.mainSegmentLoader_.throughput.count = 1;
31998 }
31999 },
32000 bandwidth: {
32001 get: function get() {
32002 var playerBandwidthEst = this.masterPlaylistController_.mainSegmentLoader_.bandwidth;
32003 var networkInformation = window.navigator.connection || window.navigator.mozConnection || window.navigator.webkitConnection;
32004 var tenMbpsAsBitsPerSecond = 10e6;
32005
32006 if (this.options_.useNetworkInformationApi && networkInformation) {
32007 // downlink returns Mbps
32008 // https://developer.mozilla.org/en-US/docs/Web/API/NetworkInformation/downlink
32009 var networkInfoBandwidthEstBitsPerSec = networkInformation.downlink * 1000 * 1000; // downlink maxes out at 10 Mbps. In the event that both networkInformationApi and the player
32010 // estimate a bandwidth greater than 10 Mbps, use the larger of the two estimates to ensure that
32011 // high quality streams are not filtered out.
32012
32013 if (networkInfoBandwidthEstBitsPerSec >= tenMbpsAsBitsPerSecond && playerBandwidthEst >= tenMbpsAsBitsPerSecond) {
32014 playerBandwidthEst = Math.max(playerBandwidthEst, networkInfoBandwidthEstBitsPerSec);
32015 } else {
32016 playerBandwidthEst = networkInfoBandwidthEstBitsPerSec;
32017 }
32018 }
32019
32020 return playerBandwidthEst;
32021 },
32022 set: function set(bandwidth) {
32023 this.masterPlaylistController_.mainSegmentLoader_.bandwidth = bandwidth; // setting the bandwidth manually resets the throughput counter
32024 // `count` is set to zero so that the current value of `rate` isn't included
32025 // in the cumulative average
32026
32027 this.masterPlaylistController_.mainSegmentLoader_.throughput = {
32028 rate: 0,
32029 count: 0
32030 };
32031 }
32032 },
32033
32034 /**
32035 * `systemBandwidth` is a combination of two serial processes' bit-rates. The first
32036 * is the network bitrate provided by `bandwidth` and the second is the bitrate of
32037 * the entire process after that - decryption, transmuxing, and appending - provided
32038 * by `throughput`.
32039 *
32040 * Since the two processes are serial, the overall system bandwidth is given by:
32041 * sysBandwidth = 1 / (1 / bandwidth + 1 / throughput)
32042 */
32043 systemBandwidth: {
32044 get: function get() {
32045 var invBandwidth = 1 / (this.bandwidth || 1);
32046 var invThroughput;
32047
32048 if (this.throughput > 0) {
32049 invThroughput = 1 / this.throughput;
32050 } else {
32051 invThroughput = 0;
32052 }
32053
32054 var systemBitrate = Math.floor(1 / (invBandwidth + invThroughput));
32055 return systemBitrate;
32056 },
32057 set: function set() {
32058 videojs__default["default"].log.error('The "systemBandwidth" property is read-only');
32059 }
32060 }
32061 });
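// A worked example of the harmonic combination described above: with
// bandwidth = 8,000,000 bits/s and throughput = 8,000,000 bits/s,
// systemBandwidth = 1 / (1/8e6 + 1/8e6) = 4,000,000 bits/s -- the serial
// pipeline is only as fast as its two stages combined.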
32062
32063 if (this.options_.bandwidth) {
32064 this.bandwidth = this.options_.bandwidth;
32065 }
32066
32067 if (this.options_.throughput) {
32068 this.throughput = this.options_.throughput;
32069 }
32070
32071 Object.defineProperties(this.stats, {
32072 bandwidth: {
32073 get: function get() {
32074 return _this3.bandwidth || 0;
32075 },
32076 enumerable: true
32077 },
32078 mediaRequests: {
32079 get: function get() {
32080 return _this3.masterPlaylistController_.mediaRequests_() || 0;
32081 },
32082 enumerable: true
32083 },
32084 mediaRequestsAborted: {
32085 get: function get() {
32086 return _this3.masterPlaylistController_.mediaRequestsAborted_() || 0;
32087 },
32088 enumerable: true
32089 },
32090 mediaRequestsTimedout: {
32091 get: function get() {
32092 return _this3.masterPlaylistController_.mediaRequestsTimedout_() || 0;
32093 },
32094 enumerable: true
32095 },
32096 mediaRequestsErrored: {
32097 get: function get() {
32098 return _this3.masterPlaylistController_.mediaRequestsErrored_() || 0;
32099 },
32100 enumerable: true
32101 },
32102 mediaTransferDuration: {
32103 get: function get() {
32104 return _this3.masterPlaylistController_.mediaTransferDuration_() || 0;
32105 },
32106 enumerable: true
32107 },
32108 mediaBytesTransferred: {
32109 get: function get() {
32110 return _this3.masterPlaylistController_.mediaBytesTransferred_() || 0;
32111 },
32112 enumerable: true
32113 },
32114 mediaSecondsLoaded: {
32115 get: function get() {
32116 return _this3.masterPlaylistController_.mediaSecondsLoaded_() || 0;
32117 },
32118 enumerable: true
32119 },
32120 mediaAppends: {
32121 get: function get() {
32122 return _this3.masterPlaylistController_.mediaAppends_() || 0;
32123 },
32124 enumerable: true
32125 },
32126 mainAppendsToLoadedData: {
32127 get: function get() {
32128 return _this3.masterPlaylistController_.mainAppendsToLoadedData_() || 0;
32129 },
32130 enumerable: true
32131 },
32132 audioAppendsToLoadedData: {
32133 get: function get() {
32134 return _this3.masterPlaylistController_.audioAppendsToLoadedData_() || 0;
32135 },
32136 enumerable: true
32137 },
32138 appendsToLoadedData: {
32139 get: function get() {
32140 return _this3.masterPlaylistController_.appendsToLoadedData_() || 0;
32141 },
32142 enumerable: true
32143 },
32144 timeToLoadedData: {
32145 get: function get() {
32146 return _this3.masterPlaylistController_.timeToLoadedData_() || 0;
32147 },
32148 enumerable: true
32149 },
32150 buffered: {
32151 get: function get() {
32152 return timeRangesToArray(_this3.tech_.buffered());
32153 },
32154 enumerable: true
32155 },
32156 currentTime: {
32157 get: function get() {
32158 return _this3.tech_.currentTime();
32159 },
32160 enumerable: true
32161 },
32162 currentSource: {
32163 get: function get() {
32164 return _this3.tech_.currentSource_;
32165 },
32166 enumerable: true
32167 },
32168 currentTech: {
32169 get: function get() {
32170 return _this3.tech_.name_;
32171 },
32172 enumerable: true
32173 },
32174 duration: {
32175 get: function get() {
32176 return _this3.tech_.duration();
32177 },
32178 enumerable: true
32179 },
32180 master: {
32181 get: function get() {
32182 return _this3.playlists.master;
32183 },
32184 enumerable: true
32185 },
32186 playerDimensions: {
32187 get: function get() {
32188 return _this3.tech_.currentDimensions();
32189 },
32190 enumerable: true
32191 },
32192 seekable: {
32193 get: function get() {
32194 return timeRangesToArray(_this3.tech_.seekable());
32195 },
32196 enumerable: true
32197 },
32198 timestamp: {
32199 get: function get() {
32200 return Date.now();
32201 },
32202 enumerable: true
32203 },
32204 videoPlaybackQuality: {
32205 get: function get() {
32206 return _this3.tech_.getVideoPlaybackQuality();
32207 },
32208 enumerable: true
32209 }
32210 });
32211 this.tech_.one('canplay', this.masterPlaylistController_.setupFirstPlay.bind(this.masterPlaylistController_));
32212 this.tech_.on('bandwidthupdate', function () {
32213 if (_this3.options_.useBandwidthFromLocalStorage) {
32214 updateVhsLocalStorage({
32215 bandwidth: _this3.bandwidth,
32216 throughput: Math.round(_this3.throughput)
32217 });
32218 }
32219 });
32220 this.masterPlaylistController_.on('selectedinitialmedia', function () {
32221 // Add the manual rendition mix-in to VhsHandler
32222 renditionSelectionMixin(_this3);
32223 });
32224 this.masterPlaylistController_.sourceUpdater_.on('createdsourcebuffers', function () {
32225 _this3.setupEme_();
32226 }); // the bandwidth of the primary segment loader is our best
32227 // estimate of overall bandwidth
32228
32229 this.on(this.masterPlaylistController_, 'progress', function () {
32230 this.tech_.trigger('progress');
32231 }); // In the live case, we need to ignore the very first `seeking` event since
32232 // that will be the result of the seek-to-live behavior
32233
32234 this.on(this.masterPlaylistController_, 'firstplay', function () {
32235 this.ignoreNextSeekingEvent_ = true;
32236 });
32237 this.setupQualityLevels_(); // do nothing if the tech has been disposed already
32238 // this can occur if someone sets the src in player.ready(), for instance
32239
32240 if (!this.tech_.el()) {
32241 return;
32242 }
32243
32244 this.mediaSourceUrl_ = window.URL.createObjectURL(this.masterPlaylistController_.mediaSource);
32245 this.tech_.src(this.mediaSourceUrl_);
32246 };
32247
32248 _proto.createKeySessions_ = function createKeySessions_() {
32249 var _this4 = this;
32250
32251 var audioPlaylistLoader = this.masterPlaylistController_.mediaTypes_.AUDIO.activePlaylistLoader;
32252 this.logger_('waiting for EME key session creation');
32253 waitForKeySessionCreation({
32254 player: this.player_,
32255 sourceKeySystems: this.source_.keySystems,
32256 audioMedia: audioPlaylistLoader && audioPlaylistLoader.media(),
32257 mainPlaylists: this.playlists.master.playlists
32258 }).then(function () {
32259 _this4.logger_('created EME key session');
32260
32261 _this4.masterPlaylistController_.sourceUpdater_.initializedEme();
32262 }).catch(function (err) {
32263 _this4.logger_('error while creating EME key session', err);
32264
32265 _this4.player_.error({
32266 message: 'Failed to initialize media keys for EME',
32267 code: 3
32268 });
32269 });
32270 };
32271
32272 _proto.handleWaitingForKey_ = function handleWaitingForKey_() {
32273 // If waitingforkey is fired, it's possible that the data that's necessary to retrieve
32274 // the key is in the manifest. While this should've happened on initial source load, it
32275 // may happen again in live streams where the keys change, and the manifest info
32276 // reflects the update.
32277 //
32278 // Because videojs-contrib-eme compares the PSSH data we send to that of PSSH data it's
32279 // already requested keys for, we don't have to worry about this generating extraneous
32280 // requests.
32281 this.logger_('waitingforkey fired, attempting to create any new key sessions');
32282 this.createKeySessions_();
32283 }
32284 /**
32285 * If necessary and EME is available, sets up EME options and waits for key session
32286 * creation.
32287 *
32288 * This function also updates the source updater so that it can be used, as for some
32289 * browsers, EME must be configured before content is appended (if appending unencrypted
32290 * content before encrypted content).
32291 */
32292 ;
32293
32294 _proto.setupEme_ = function setupEme_() {
32295 var _this5 = this;
32296
32297 var audioPlaylistLoader = this.masterPlaylistController_.mediaTypes_.AUDIO.activePlaylistLoader;
32298 var didSetupEmeOptions = setupEmeOptions({
32299 player: this.player_,
32300 sourceKeySystems: this.source_.keySystems,
32301 media: this.playlists.media(),
32302 audioMedia: audioPlaylistLoader && audioPlaylistLoader.media()
32303 });
32304 this.player_.tech_.on('keystatuschange', function (e) {
32305 if (e.status !== 'output-restricted') {
32306 return;
32307 }
32308
32309 var masterPlaylist = _this5.masterPlaylistController_.master();
32310
32311 if (!masterPlaylist || !masterPlaylist.playlists) {
32312 return;
32313 }
32314
32315 var excludedHDPlaylists = []; // Assume all HD streams are unplayable and exclude them from ABR selection
32316
32317 masterPlaylist.playlists.forEach(function (playlist) {
32318 if (playlist && playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height >= 720) {
32319 if (!playlist.excludeUntil || playlist.excludeUntil < Infinity) {
32320 playlist.excludeUntil = Infinity;
32321 excludedHDPlaylists.push(playlist);
32322 }
32323 }
32324 });
32325
32326 if (excludedHDPlaylists.length) {
32327 var _videojs$log;
32328
32329 (_videojs$log = videojs__default["default"].log).warn.apply(_videojs$log, ['DRM keystatus changed to "output-restricted." Removing the following HD playlists ' + 'that will most likely fail to play and clearing the buffer. ' + 'This may be due to HDCP restrictions on the stream and the capabilities of the current device.'].concat(excludedHDPlaylists)); // Clear the buffer before switching playlists, since it may already contain unplayable segments
32330
32331
32332 _this5.masterPlaylistController_.fastQualityChange_();
32333 }
32334 });
32335 this.handleWaitingForKey_ = this.handleWaitingForKey_.bind(this);
32336 this.player_.tech_.on('waitingforkey', this.handleWaitingForKey_); // In IE11 this is too early to initialize media keys, and IE11 does not support
32337 // promises.
32338
32339 if (videojs__default["default"].browser.IE_VERSION === 11 || !didSetupEmeOptions) {
32340 // If EME options were not set up, we've done all we could to initialize EME.
32341 this.masterPlaylistController_.sourceUpdater_.initializedEme();
32342 return;
32343 }
32344
32345 this.createKeySessions_();
32346 }
32347 /**
32348 * Initializes the quality levels and sets listeners to update them.
32349 *
32350 * @method setupQualityLevels_
32351 * @private
32352 */
32353 ;
32354
32355 _proto.setupQualityLevels_ = function setupQualityLevels_() {
32356 var _this6 = this;
32357
32358 var player = videojs__default["default"].players[this.tech_.options_.playerId]; // if there isn't a player or there isn't a qualityLevels plugin
32359 // or qualityLevels_ listeners have already been setup, do nothing.
32360
32361 if (!player || !player.qualityLevels || this.qualityLevels_) {
32362 return;
32363 }
32364
32365 this.qualityLevels_ = player.qualityLevels();
32366 this.masterPlaylistController_.on('selectedinitialmedia', function () {
32367 handleVhsLoadedMetadata(_this6.qualityLevels_, _this6);
32368 });
32369 this.playlists.on('mediachange', function () {
32370 handleVhsMediaChange(_this6.qualityLevels_, _this6.playlists);
32371 });
32372 }
32373 /**
32374 * return the version
32375 */
32376 ;
32377
32378 VhsHandler.version = function version$5() {
32379 return {
32380 '@videojs/http-streaming': version$4,
32381 'mux.js': version$3,
32382 'mpd-parser': version$2,
32383 'm3u8-parser': version$1,
32384 'aes-decrypter': version
32385 };
32386 }
32387 /**
32388 * return the version
32389 */
32390 ;
32391
32392 _proto.version = function version() {
32393 return this.constructor.version();
32394 };
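// For this build the returned map is (values taken from the version constants
// declared earlier in this file):
//
//   VhsHandler.version();
//   // => { '@videojs/http-streaming': '2.16.2', 'mux.js': '6.0.1',
//   //      'mpd-parser': '0.22.1', 'm3u8-parser': '4.8.0', 'aes-decrypter': '3.1.3' }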
32395
32396 _proto.canChangeType = function canChangeType() {
32397 return SourceUpdater.canChangeType();
32398 }
32399 /**
32400 * Begin playing the video.
32401 */
32402 ;
32403
32404 _proto.play = function play() {
32405 this.masterPlaylistController_.play();
32406 }
32407 /**
32408 * a wrapper around the function in MasterPlaylistController
32409 */
32410 ;
32411
32412 _proto.setCurrentTime = function setCurrentTime(currentTime) {
32413 this.masterPlaylistController_.setCurrentTime(currentTime);
32414 }
32415 /**
32416 * a wrapper around the function in MasterPlaylistController
32417 */
32418 ;
32419
32420 _proto.duration = function duration() {
32421 return this.masterPlaylistController_.duration();
32422 }
32423 /**
32424 * a wrapper around the function in MasterPlaylistController
32425 */
32426 ;
32427
32428 _proto.seekable = function seekable() {
32429 return this.masterPlaylistController_.seekable();
32430 }
32431 /**
32432 * Abort all outstanding work and cleanup.
32433 */
32434 ;
32435
32436 _proto.dispose = function dispose() {
32437 if (this.playbackWatcher_) {
32438 this.playbackWatcher_.dispose();
32439 }
32440
32441 if (this.masterPlaylistController_) {
32442 this.masterPlaylistController_.dispose();
32443 }
32444
32445 if (this.qualityLevels_) {
32446 this.qualityLevels_.dispose();
32447 }
32448
32449 if (this.player_) {
32450 delete this.player_.vhs;
32451 delete this.player_.dash;
32452 delete this.player_.hls;
32453 }
32454
32455 if (this.tech_ && this.tech_.vhs) {
32456 delete this.tech_.vhs;
32457 } // don't check this.tech_.hls as it will log a deprecation warning
32458
32459
32460 if (this.tech_) {
32461 delete this.tech_.hls;
32462 }
32463
32464 if (this.mediaSourceUrl_ && window.URL.revokeObjectURL) {
32465 window.URL.revokeObjectURL(this.mediaSourceUrl_);
32466 this.mediaSourceUrl_ = null;
32467 }
32468
32469 if (this.tech_) {
32470 this.tech_.off('waitingforkey', this.handleWaitingForKey_);
32471 }
32472
32473 _Component.prototype.dispose.call(this);
32474 };
32475
32476 _proto.convertToProgramTime = function convertToProgramTime(time, callback) {
32477 return getProgramTime({
32478 playlist: this.masterPlaylistController_.media(),
32479 time: time,
32480 callback: callback
32481 });
32482 } // the player must be playing before calling this
32483 ;
32484
32485 _proto.seekToProgramTime = function seekToProgramTime$1(programTime, callback, pauseAfterSeek, retryCount) {
32486 if (pauseAfterSeek === void 0) {
32487 pauseAfterSeek = true;
32488 }
32489
32490 if (retryCount === void 0) {
32491 retryCount = 2;
32492 }
32493
32494 return seekToProgramTime({
32495 programTime: programTime,
32496 playlist: this.masterPlaylistController_.media(),
32497 retryCount: retryCount,
32498 pauseAfterSeek: pauseAfterSeek,
32499 seekTo: this.options_.seekTo,
32500 tech: this.options_.tech,
32501 callback: callback
32502 });
32503 };
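// A usage sketch (the ISO-8601 program time is illustrative; per the inline
// note above, the player must be playing, and the callback is hedged to follow
// the seekToProgramTime util -- an Error first argument on failure):
//
//   player.tech().vhs.seekToProgramTime('2022-01-01T00:00:30.000Z', function (err, time) {
//     if (err) { return videojs.log.warn(err.message); }
//     videojs.log('seeked to stream time ' + time);
//   });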
32504
32505 return VhsHandler;
32506 }(Component);
32507 /**
32508 * The Source Handler object, which informs video.js what additional
32509 * MIME types are supported and sets up playback. It is registered
32510 * automatically to the appropriate tech based on the capabilities of
32511 * the browser it is running in. It is not necessary to use or modify
32512 * this object in normal usage.
32513 */
32514
32515
32516 var VhsSourceHandler = {
32517 name: 'videojs-http-streaming',
32518 VERSION: version$4,
32519 canHandleSource: function canHandleSource(srcObj, options) {
32520 if (options === void 0) {
32521 options = {};
32522 }
32523
32524 var localOptions = videojs__default["default"].mergeOptions(videojs__default["default"].options, options);
32525 return VhsSourceHandler.canPlayType(srcObj.type, localOptions);
32526 },
32527 handleSource: function handleSource(source, tech, options) {
32528 if (options === void 0) {
32529 options = {};
32530 }
32531
32532 var localOptions = videojs__default["default"].mergeOptions(videojs__default["default"].options, options);
32533 tech.vhs = new VhsHandler(source, tech, localOptions);
32534
32535 if (!videojs__default["default"].hasOwnProperty('hls')) {
32536 Object.defineProperty(tech, 'hls', {
32537 get: function get() {
32538 videojs__default["default"].log.warn('player.tech().hls is deprecated. Use player.tech().vhs instead.');
32539 return tech.vhs;
32540 },
32541 configurable: true
32542 });
32543 }
32544
32545 tech.vhs.xhr = xhrFactory();
32546 tech.vhs.src(source.src, source.type);
32547 return tech.vhs;
32548 },
32549 canPlayType: function canPlayType(type, options) {
32550 var simpleType = simpleTypeFromSourceType(type);
32551
32552 if (!simpleType) {
32553 return '';
32554 }
32555
32556 var overrideNative = VhsSourceHandler.getOverrideNative(options);
32557 var supportsTypeNatively = Vhs.supportsTypeNatively(simpleType);
32558 var canUseMsePlayback = !supportsTypeNatively || overrideNative;
32559 return canUseMsePlayback ? 'maybe' : '';
32560 },
32561 getOverrideNative: function getOverrideNative(options) {
32562 if (options === void 0) {
32563 options = {};
32564 }
32565
32566 var _options = options,
32567 _options$vhs = _options.vhs,
32568 vhs = _options$vhs === void 0 ? {} : _options$vhs,
32569 _options$hls = _options.hls,
32570 hls = _options$hls === void 0 ? {} : _options$hls;
32571 var defaultOverrideNative = !(videojs__default["default"].browser.IS_ANY_SAFARI || videojs__default["default"].browser.IS_IOS);
32572 var _vhs$overrideNative = vhs.overrideNative,
32573 overrideNative = _vhs$overrideNative === void 0 ? defaultOverrideNative : _vhs$overrideNative;
32574 var _hls$overrideNative = hls.overrideNative,
32575 legacyOverrideNative = _hls$overrideNative === void 0 ? false : _hls$overrideNative;
32576 return legacyOverrideNative || overrideNative;
32577 }
32578 };
32579 /**
32580 * Check to see if the native MediaSource object exists and supports
32581 * an MP4 container with both H.264 video and AAC-LC audio.
32582 *
32583 * @return {boolean} if native media sources are supported
32584 */
32585
32586 var supportsNativeMediaSources = function supportsNativeMediaSources() {
32587 return browserSupportsCodec('avc1.4d400d,mp4a.40.2');
32588 }; // register source handlers with the appropriate techs
32589
32590
32591 if (supportsNativeMediaSources()) {
32592 videojs__default["default"].getTech('Html5').registerSourceHandler(VhsSourceHandler, 0);
32593 }
32594
32595 videojs__default["default"].VhsHandler = VhsHandler;
32596 Object.defineProperty(videojs__default["default"], 'HlsHandler', {
32597 get: function get() {
32598 videojs__default["default"].log.warn('videojs.HlsHandler is deprecated. Use videojs.VhsHandler instead.');
32599 return VhsHandler;
32600 },
32601 configurable: true
32602 });
32603 videojs__default["default"].VhsSourceHandler = VhsSourceHandler;
32604 Object.defineProperty(videojs__default["default"], 'HlsSourceHandler', {
32605 get: function get() {
32606 videojs__default["default"].log.warn('videojs.HlsSourceHandler is deprecated. ' + 'Use videojs.VhsSourceHandler instead.');
32607 return VhsSourceHandler;
32608 },
32609 configurable: true
32610 });
32611 videojs__default["default"].Vhs = Vhs;
32612 Object.defineProperty(videojs__default["default"], 'Hls', {
32613 get: function get() {
32614 videojs__default["default"].log.warn('videojs.Hls is deprecated. Use videojs.Vhs instead.');
32615 return Vhs;
32616 },
32617 configurable: true
32618 });
32619
32620 if (!videojs__default["default"].use) {
32621 videojs__default["default"].registerComponent('Hls', Vhs);
32622 videojs__default["default"].registerComponent('Vhs', Vhs);
32623 }
32624
32625 videojs__default["default"].options.vhs = videojs__default["default"].options.vhs || {};
32626 videojs__default["default"].options.hls = videojs__default["default"].options.hls || {};
32627
32628 if (!videojs__default["default"].getPlugin || !videojs__default["default"].getPlugin('reloadSourceOnError')) {
32629 var registerPlugin = videojs__default["default"].registerPlugin || videojs__default["default"].plugin;
32630 registerPlugin('reloadSourceOnError', reloadSourceOnError);
32631 }
32632
32633 exports.LOCAL_STORAGE_KEY = LOCAL_STORAGE_KEY;
32634 exports.Vhs = Vhs;
32635 exports.VhsHandler = VhsHandler;
32636 exports.VhsSourceHandler = VhsSourceHandler;
32637 exports.emeKeySystems = emeKeySystems;
32638 exports.expandDataUri = expandDataUri;
32639 exports.getAllPsshKeySystemsOptions = getAllPsshKeySystemsOptions;
32640 exports.setupEmeOptions = setupEmeOptions;
32641 exports.simpleTypeFromSourceType = simpleTypeFromSourceType;
32642 exports.waitForKeySessionCreation = waitForKeySessionCreation;
32643
32644 Object.defineProperty(exports, '__esModule', { value: true });
32645
32646}));